author    Jonas Smedegaard <dr@jones.dk>  2010-08-27 10:01:58 +0200
committer Jonas Smedegaard <dr@jones.dk>  2010-08-27 10:01:58 +0200
commit    f398ad035b973608d380c9939ea845d8e2a0cdc2 (patch)
tree      1ba1a0c94e375ab8ed609eaa57a542c6b87de5a8 /IkiWiki
parent    958e5735c946263a111420fe47abe58782581e8c (diff)
parent    6d213a0c739d5b34357b01a616f99197eeba6ad9 (diff)
Merge branch 'master' of git://git.ikiwiki.info
Diffstat (limited to 'IkiWiki')
-rw-r--r--  IkiWiki/CGI.pm | 80
-rw-r--r--  IkiWiki/Plugin/404.pm | 5
-rw-r--r--  IkiWiki/Plugin/aggregate.pm | 18
-rw-r--r--  IkiWiki/Plugin/amazon_s3.pm | 6
-rw-r--r--  IkiWiki/Plugin/anonok.pm | 1
-rw-r--r--  IkiWiki/Plugin/attachment.pm | 37
-rw-r--r--  IkiWiki/Plugin/autoindex.pm | 43
-rw-r--r--  IkiWiki/Plugin/blogspam.pm | 4
-rw-r--r--  IkiWiki/Plugin/brokenlinks.pm | 37
-rw-r--r--  IkiWiki/Plugin/bzr.pm | 98
-rw-r--r--  IkiWiki/Plugin/calendar.pm | 363
-rw-r--r--  IkiWiki/Plugin/color.pm | 21
-rw-r--r--  IkiWiki/Plugin/comments.pm | 302
-rw-r--r--  IkiWiki/Plugin/conditional.pm | 30
-rw-r--r--  IkiWiki/Plugin/creole.pm | 1
-rw-r--r--  IkiWiki/Plugin/cutpaste.pm | 9
-rw-r--r--  IkiWiki/Plugin/cvs.pm | 495
-rw-r--r--  IkiWiki/Plugin/darcs.pm | 108
-rw-r--r--  IkiWiki/Plugin/date.pm | 34
-rw-r--r--  IkiWiki/Plugin/editdiff.pm | 3
-rw-r--r--  IkiWiki/Plugin/editpage.pm | 79
-rw-r--r--  IkiWiki/Plugin/edittemplate.pm | 43
-rw-r--r--  IkiWiki/Plugin/external.pm | 21
-rw-r--r--  IkiWiki/Plugin/filecheck.pm | 56
-rw-r--r--  IkiWiki/Plugin/flattr.pm | 97
-rw-r--r--  IkiWiki/Plugin/format.pm | 10
-rw-r--r--  IkiWiki/Plugin/fortune.pm | 1
-rw-r--r--  IkiWiki/Plugin/getsource.pm | 94
-rw-r--r--  IkiWiki/Plugin/git.pm | 208
-rw-r--r--  IkiWiki/Plugin/google.pm | 15
-rw-r--r--  IkiWiki/Plugin/goto.pm | 20
-rw-r--r--  IkiWiki/Plugin/graphviz.pm | 15
-rw-r--r--  IkiWiki/Plugin/haiku.pm | 1
-rw-r--r--  IkiWiki/Plugin/highlight.pm | 3
-rw-r--r--  IkiWiki/Plugin/hnb.pm | 5
-rw-r--r--  IkiWiki/Plugin/html.pm | 1
-rw-r--r--  IkiWiki/Plugin/htmlscrubber.pm | 24
-rw-r--r--  IkiWiki/Plugin/htmltidy.pm | 4
-rw-r--r--  IkiWiki/Plugin/httpauth.pm | 77
-rw-r--r--  IkiWiki/Plugin/img.pm | 144
-rw-r--r--  IkiWiki/Plugin/inline.pm | 193
-rw-r--r--  IkiWiki/Plugin/link.pm | 106
-rw-r--r--  IkiWiki/Plugin/linkmap.pm | 71
-rw-r--r--  IkiWiki/Plugin/listdirectives.pm | 3
-rw-r--r--  IkiWiki/Plugin/localstyle.pm | 35
-rw-r--r--  IkiWiki/Plugin/lockedit.pm | 8
-rw-r--r--  IkiWiki/Plugin/map.pm | 23
-rw-r--r--  IkiWiki/Plugin/mdwn.pm | 7
-rw-r--r--  IkiWiki/Plugin/mercurial.pm | 46
-rw-r--r--  IkiWiki/Plugin/meta.pm | 115
-rw-r--r--  IkiWiki/Plugin/mirrorlist.pm | 8
-rw-r--r--  IkiWiki/Plugin/moderatedcomments.pm | 64
-rw-r--r--  IkiWiki/Plugin/monotone.pm | 98
-rw-r--r--  IkiWiki/Plugin/more.pm | 7
-rw-r--r--  IkiWiki/Plugin/norcs.pm | 14
-rw-r--r--  IkiWiki/Plugin/opendiscussion.pm | 8
-rw-r--r--  IkiWiki/Plugin/openid.pm | 215
-rw-r--r--  IkiWiki/Plugin/orphans.pm | 52
-rw-r--r--  IkiWiki/Plugin/otl.pm | 15
-rw-r--r--  IkiWiki/Plugin/pagecount.pm | 25
-rw-r--r--  IkiWiki/Plugin/pagestats.pm | 56
-rw-r--r--  IkiWiki/Plugin/parentlinks.pm | 18
-rw-r--r--  IkiWiki/Plugin/passwordauth.pm | 64
-rw-r--r--  IkiWiki/Plugin/po.pm | 413
-rw-r--r--  IkiWiki/Plugin/poll.pm | 10
-rw-r--r--  IkiWiki/Plugin/polygen.pm | 1
-rw-r--r--  IkiWiki/Plugin/postsparkline.pm | 15
-rw-r--r--  IkiWiki/Plugin/progress.pm | 17
-rw-r--r--  IkiWiki/Plugin/rawhtml.pm | 1
-rw-r--r--  IkiWiki/Plugin/recentchanges.pm | 11
-rw-r--r--  IkiWiki/Plugin/relativedate.pm | 32
-rw-r--r--  IkiWiki/Plugin/remove.pm | 23
-rw-r--r--  IkiWiki/Plugin/rename.pm | 34
-rw-r--r--  IkiWiki/Plugin/repolist.pm | 1
-rw-r--r--  IkiWiki/Plugin/rsync.pm | 45
-rw-r--r--  IkiWiki/Plugin/search.pm | 49
-rw-r--r--  IkiWiki/Plugin/shortcut.pm | 1
-rw-r--r--  IkiWiki/Plugin/sidebar.pm | 58
-rw-r--r--  IkiWiki/Plugin/signinedit.pm | 2
-rw-r--r--  IkiWiki/Plugin/skeleton.pm.example | 28
-rw-r--r--  IkiWiki/Plugin/smiley.pm | 9
-rw-r--r--  IkiWiki/Plugin/sortnaturally.pm | 32
-rw-r--r--  IkiWiki/Plugin/sparkline.pm | 10
-rw-r--r--  IkiWiki/Plugin/svn.pm | 106
-rw-r--r--  IkiWiki/Plugin/table.pm | 1
-rw-r--r--  IkiWiki/Plugin/tag.pm | 88
-rw-r--r--  IkiWiki/Plugin/template.pm | 55
-rw-r--r--  IkiWiki/Plugin/teximg.pm | 44
-rw-r--r--  IkiWiki/Plugin/textile.pm | 1
-rw-r--r--  IkiWiki/Plugin/theme.pm | 65
-rw-r--r--  IkiWiki/Plugin/tla.pm | 39
-rw-r--r--  IkiWiki/Plugin/toc.pm | 12
-rw-r--r--  IkiWiki/Plugin/toggle.pm | 12
-rw-r--r--  IkiWiki/Plugin/txt.pm | 10
-rw-r--r--  IkiWiki/Plugin/typography.pm | 2
-rw-r--r--  IkiWiki/Plugin/underlay.pm | 10
-rw-r--r--  IkiWiki/Plugin/version.pm | 1
-rw-r--r--  IkiWiki/Plugin/websetup.pm | 95
-rw-r--r--  IkiWiki/Plugin/wikitext.pm | 1
-rw-r--r--  IkiWiki/Plugin/wmd.pm | 4
-rw-r--r--  IkiWiki/Receive.pm | 12
-rw-r--r--  IkiWiki/Render.pm | 700
-rw-r--r--  IkiWiki/Setup.pm | 201
-rw-r--r--  IkiWiki/Setup/Automator.pm | 53
-rw-r--r--  IkiWiki/Setup/Standard.pm | 72
-rw-r--r--  IkiWiki/Setup/Yaml.pm | 50
-rw-r--r--  IkiWiki/Wrapper.pm | 102
107 files changed, 4635 insertions, 1757 deletions
diff --git a/IkiWiki/CGI.pm b/IkiWiki/CGI.pm
index af58d7cb5..f2a32a958 100644
--- a/IkiWiki/CGI.pm
+++ b/IkiWiki/CGI.pm
@@ -15,13 +15,14 @@ sub printheader ($) {
if ($config{sslcookie}) {
print $session->header(-charset => 'utf-8',
-cookie => $session->cookie(-httponly => 1, -secure => 1));
- } else {
+ }
+ else {
print $session->header(-charset => 'utf-8',
-cookie => $session->cookie(-httponly => 1));
}
}
-sub showform ($$$$;@) {
+sub prepform {
my $form=shift;
my $buttons=shift;
my $session=shift;
@@ -34,13 +35,24 @@ sub showform ($$$$;@) {
});
}
+ return $form;
+}
+
+sub showform ($$$$;@) {
+ my $form=prepform(@_);
+ shift;
+ my $buttons=shift;
+ my $session=shift;
+ my $cgi=shift;
+
printheader($session);
print misctemplate($form->title, $form->render(submit => $buttons), @_);
}
sub redirect ($$) {
my $q=shift;
- my $url=shift;
+ eval q{use URI};
+ my $url=URI->new(shift);
if (! $config{w3mmode}) {
print $q->redirect($url);
}
@@ -51,7 +63,7 @@ sub redirect ($$) {
}
sub decode_cgi_utf8 ($) {
- # decode_form_utf8 method is needed for 5.10
+ # decode_form_utf8 method is needed for 5.01
if ($] < 5.01) {
my $cgi = shift;
foreach my $f ($cgi->param) {
@@ -64,8 +76,9 @@ sub decode_form_utf8 ($) {
if ($] >= 5.01) {
my $form = shift;
foreach my $f ($form->field) {
+ my @value=map { decode_utf8($_) } $form->field($f);
$form->field(name => $f,
- value => decode_utf8($form->field($f)),
+ value => \@value,
force => 1,
);
}
@@ -87,9 +100,10 @@ sub needsignin ($$) {
}
}
-sub cgi_signin ($$) {
+sub cgi_signin ($$;$) {
my $q=shift;
my $session=shift;
+ my $returnhtml=shift;
decode_cgi_utf8($q);
eval q{use CGI::FormBuilder};
@@ -105,13 +119,10 @@ sub cgi_signin ($$) {
action => $config{cgiurl},
header => 0,
template => {type => 'div'},
- stylesheet => baseurl()."style.css",
+ stylesheet => 1,
);
my $buttons=["Login"];
- if ($q->param("do") ne "signin" && !$form->submitted) {
- $form->text(gettext("You need to log in first."));
- }
$form->field(name => "do", type => "hidden", value => "signin",
force => 1);
@@ -126,6 +137,11 @@ sub cgi_signin ($$) {
$form->validate;
}
+ if ($returnhtml) {
+ $form=prepform($form, $buttons, $session, $q);
+ return $form->render(submit => $buttons);
+ }
+
showform($form, $buttons, $session, $q);
}
@@ -184,7 +200,7 @@ sub cgi_prefs ($$) {
params => $q,
action => $config{cgiurl},
template => {type => 'div'},
- stylesheet => baseurl()."style.css",
+ stylesheet => 1,
fieldsets => [
[login => gettext("Login")],
[preferences => gettext("Preferences")],
@@ -231,14 +247,20 @@ sub cgi_prefs ($$) {
$form->text(gettext("Preferences saved."));
}
- showform($form, $buttons, $session, $q);
+ showform($form, $buttons, $session, $q,
+ prefsurl => "", # avoid showing the preferences link
+ );
}
-sub cgi_custom_failure ($$) {
- my $header=shift;
+sub cgi_custom_failure ($$$) {
+ my $q=shift;
+ my $httpstatus=shift;
my $message=shift;
- print $header;
+ print $q->header(
+ -status => $httpstatus,
+ -charset => 'utf-8',
+ );
print $message;
# Internet Explod^Hrer won't show custom 404 responses
@@ -252,16 +274,30 @@ sub check_banned ($$) {
my $q=shift;
my $session=shift;
+ my $banned=0;
my $name=$session->param("name");
- if (defined $name) {
- if (grep { $name eq $_ } @{$config{banned_users}}) {
- $session->delete();
- cgi_savesession($session);
- cgi_custom_failure(
- $q->header(-status => "403 Forbidden"),
- gettext("You are banned."));
+ if (defined $name &&
+ grep { $name eq $_ } @{$config{banned_users}}) {
+ $banned=1;
+ }
+
+ foreach my $b (@{$config{banned_users}}) {
+ if (pagespec_match("", $b,
+ ip => $session->remote_addr(),
+ name => defined $name ? $name : "",
+ )) {
+ $banned=1;
+ last;
}
}
+
+ if ($banned) {
+ $session->delete();
+ cgi_savesession($session);
+ cgi_custom_failure(
+ $q, "403 Forbidden",
+ gettext("You are banned."));
+ }
}
sub cgi_getsession ($) {
diff --git a/IkiWiki/Plugin/404.pm b/IkiWiki/Plugin/404.pm
index bae9e15d1..42cfa9e8a 100644
--- a/IkiWiki/Plugin/404.pm
+++ b/IkiWiki/Plugin/404.pm
@@ -10,6 +10,7 @@ use IkiWiki 3.00;
sub import {
hook(type => "cgi", id => '404', call => \&cgi);
+ hook(type => "getsetup", id => '404', call => \&getsetup);
IkiWiki::loadplugin("goto");
}
@@ -21,6 +22,7 @@ sub getsetup () {
# server admin action too
safe => 0,
rebuild => 0,
+ section => "web",
}
}
@@ -69,7 +71,8 @@ sub cgi ($) {
if (exists $ENV{REDIRECT_STATUS} &&
$ENV{REDIRECT_STATUS} eq '404') {
- my $page = cgi_page_from_404($ENV{REDIRECT_URL},
+ my $page = cgi_page_from_404(
+ Encode::decode_utf8($ENV{REDIRECT_URL}),
$config{url}, $config{usedirs});
IkiWiki::Plugin::goto::cgi_goto($cgi, $page);
}
diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 5a9eb433d..7789c4c2a 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -298,7 +298,7 @@ sub loadstate () {
return if $state_loaded;
$state_loaded=1;
if (-e "$config{wikistatedir}/aggregate") {
- open(IN, "$config{wikistatedir}/aggregate") ||
+ open(IN, "<", "$config{wikistatedir}/aggregate") ||
die "$config{wikistatedir}/aggregate: $!";
while (<IN>) {
$_=IkiWiki::possibly_foolish_untaint($_);
@@ -335,7 +335,7 @@ sub savestate () {
garbage_collect();
my $newfile="$config{wikistatedir}/aggregate.new";
my $cleanup = sub { unlink($newfile) };
- open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+ open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
foreach my $data (values %feeds, values %guids) {
my @line;
foreach my $field (keys %$data) {
@@ -356,6 +356,20 @@ sub savestate () {
close OUT || error("save $newfile: $!", $cleanup);
rename($newfile, "$config{wikistatedir}/aggregate") ||
error("rename $newfile: $!", $cleanup);
+
+ my $timestamp=undef;
+ foreach my $feed (keys %feeds) {
+ my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
+ if (! defined $timestamp || $timestamp > $t) {
+ $timestamp=$t;
+ }
+ }
+ $newfile=~s/\.new$/time/;
+ open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
+ if (defined $timestamp) {
+ print OUT $timestamp."\n";
+ }
+ close OUT || error("save $newfile: $!", $cleanup);
}
sub garbage_collect () {
diff --git a/IkiWiki/Plugin/amazon_s3.pm b/IkiWiki/Plugin/amazon_s3.pm
index 3571c4189..cfd8cd347 100644
--- a/IkiWiki/Plugin/amazon_s3.pm
+++ b/IkiWiki/Plugin/amazon_s3.pm
@@ -133,6 +133,10 @@ sub getbucket {
}
if (! $bucket) {
+ # Try to use existing bucket.
+ $bucket=$s3->bucket($config{amazon_s3_bucket});
+ }
+ if (! $bucket) {
error(gettext("Failed to create S3 bucket: ").
$s3->err.": ".$s3->errstr."\n");
}
@@ -178,7 +182,7 @@ sub writefile ($$$;$$) {
# First, write the file to disk.
my $ret=$IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::writefile'}->($file, $destdir, $content, $binary, $writer);
-
+
my @keys=IkiWiki::Plugin::amazon_s3::file2keys("$destdir/$file");
# Store the data in S3.
diff --git a/IkiWiki/Plugin/anonok.pm b/IkiWiki/Plugin/anonok.pm
index 243b98920..0e74cbfad 100644
--- a/IkiWiki/Plugin/anonok.pm
+++ b/IkiWiki/Plugin/anonok.pm
@@ -15,6 +15,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
anonok_pagespec => {
type => "pagespec",
diff --git a/IkiWiki/Plugin/attachment.pm b/IkiWiki/Plugin/attachment.pm
index 087c315a9..ee105a170 100644
--- a/IkiWiki/Plugin/attachment.pm
+++ b/IkiWiki/Plugin/attachment.pm
@@ -19,6 +19,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "web",
},
allowed_attachments => {
type => "pagespec",
@@ -57,7 +58,7 @@ sub check_canattach ($$;$) {
$config{allowed_attachments},
file => $file,
user => $session->param("name"),
- ip => $ENV{REMOTE_ADDR},
+ ip => $session->remote_addr(),
);
}
@@ -112,7 +113,7 @@ sub formbuilder (@) {
return if ! defined $form->field("do") || ($form->field("do") ne "edit" && $form->field("do") ne "create") ;
- my $filename=$q->param('attachment');
+ my $filename=Encode::decode_utf8($q->param('attachment'));
if (defined $filename && length $filename &&
($form->submitted eq "Upload Attachment" || $form->submitted eq "Save Page")) {
my $session=$params{session};
@@ -133,10 +134,13 @@ sub formbuilder (@) {
}
}
+ $filename=IkiWiki::basename($filename);
+ $filename=~s/.*\\+(.+)/$1/; # hello, windows
+
$filename=linkpage(IkiWiki::possibly_foolish_untaint(
attachment_location($form->field('page')).
- IkiWiki::basename($filename)));
- if (IkiWiki::file_pruned($filename, $config{srcdir})) {
+ $filename));
+ if (IkiWiki::file_pruned($filename)) {
error(gettext("bad attachment filename"));
}
@@ -179,9 +183,12 @@ sub formbuilder (@) {
if ($config{rcs}) {
IkiWiki::rcs_add($filename);
IkiWiki::disable_commit_hook();
- IkiWiki::rcs_commit($filename, gettext("attachment upload"),
- IkiWiki::rcs_prepedit($filename),
- $session->param("name"), $ENV{REMOTE_ADDR});
+ IkiWiki::rcs_commit(
+ file => $filename,
+ message => gettext("attachment upload"),
+ token => IkiWiki::rcs_prepedit($filename),
+ session => $session,
+ );
IkiWiki::enable_commit_hook();
IkiWiki::rcs_update();
}
@@ -189,11 +196,19 @@ sub formbuilder (@) {
IkiWiki::saveindex();
}
elsif ($form->submitted eq "Insert Links") {
- my $page=quotemeta($q->param("page"));
+ my $page=quotemeta(Encode::decode_utf8($q->param("page")));
my $add="";
foreach my $f ($q->param("attachment_select")) {
+ $f=Encode::decode_utf8($f);
$f=~s/^$page\///;
- $add.="[[$f]]\n";
+ if (IkiWiki::isinlinableimage($f) &&
+ UNIVERSAL::can("IkiWiki::Plugin::img", "import")) {
+ $add.='[[!img '.$f.' align="right" size="" alt=""]]';
+ }
+ else {
+ $add.="[[$f]]";
+ }
+ $add.="\n";
}
$form->field(name => 'editcontent',
value => $form->field('editcontent')."\n\n".$add,
@@ -223,13 +238,13 @@ sub attachment_list ($) {
my @ret;
foreach my $f (values %pagesources) {
if (! defined pagetype($f) &&
- $f=~m/^\Q$loc\E[^\/]+$/ &&
- -e "$config{srcdir}/$f") {
+ $f=~m/^\Q$loc\E[^\/]+$/) {
push @ret, {
"field-select" => '<input type="checkbox" name="attachment_select" value="'.$f.'" />',
link => htmllink($page, $page, $f, noimageinline => 1),
size => IkiWiki::Plugin::filecheck::humansize((stat(_))[7]),
mtime => displaytime($IkiWiki::pagemtime{$f}),
+ mtime_raw => $IkiWiki::pagemtime{$f},
};
}
}
diff --git a/IkiWiki/Plugin/autoindex.pm b/IkiWiki/Plugin/autoindex.pm
index 555856b11..11595e217 100644
--- a/IkiWiki/Plugin/autoindex.pm
+++ b/IkiWiki/Plugin/autoindex.pm
@@ -33,21 +33,26 @@ sub genindex ($) {
sub refresh () {
eval q{use File::Find};
error($@) if $@;
+ eval q{use Cwd};
+ error($@) if $@;
+ my $origdir=getcwd();
my (%pages, %dirs);
foreach my $dir ($config{srcdir}, @{$config{underlaydirs}}, $config{underlaydir}) {
+ chdir($dir) || next;
+
find({
no_chdir => 1,
wanted => sub {
- $_=decode_utf8($_);
- if (IkiWiki::file_pruned($_, $dir)) {
+ my $file=decode_utf8($_);
+ $file=~s/^\.\/?//;
+ return unless length $file;
+ if (IkiWiki::file_pruned($file)) {
$File::Find::prune=1;
}
elsif (! -l $_) {
- my ($f)=/$config{wiki_file_regexp}/; # untaint
+ my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint
return unless defined $f;
- $f=~s/^\Q$dir\E\/?//;
- return unless length $f;
return if $f =~ /\._([^.]+)$/; # skip internal page
if (! -d _) {
$pages{pagename($f)}=1;
@@ -57,12 +62,22 @@ sub refresh () {
}
}
}
- }, $dir);
+ }, '.');
+
+ chdir($origdir) || die "chdir $origdir: $!";
}
my %deleted;
- if (ref $pagestate{index}{autoindex}{deleted}) {
- %deleted=%{$pagestate{index}{autoindex}{deleted}};
+ if (ref $wikistate{autoindex}{deleted}) {
+ %deleted=%{$wikistate{autoindex}{deleted}};
+ }
+ elsif (ref $pagestate{index}{autoindex}{deleted}) {
+ # compatability code
+ %deleted=%{$pagestate{index}{autoindex}{deleted}};
+ delete $pagestate{index}{autoindex};
+ }
+
+ if (keys %deleted) {
foreach my $dir (keys %deleted) {
# remove deleted page state if the deleted page is re-added,
# or if all its subpages are deleted
@@ -71,7 +86,7 @@ sub refresh () {
delete $deleted{$dir};
}
}
- $pagestate{index}{autoindex}{deleted}=\%deleted;
+ $wikistate{autoindex}{deleted}=\%deleted;
}
my @needed;
@@ -82,10 +97,10 @@ sub refresh () {
# This page must have just been deleted, so
# don't re-add it. And remember it was
# deleted.
- if (! ref $pagestate{index}{autoindex}{deleted}) {
- $pagestate{index}{autoindex}{deleted}={};
+ if (! ref $wikistate{autoindex}{deleted}) {
+ $wikistate{autoindex}{deleted}={};
}
- ${$pagestate{index}{autoindex}{deleted}}{$dir}=1;
+ ${$wikistate{autoindex}{deleted}}{$dir}=1;
}
else {
push @needed, $dir;
@@ -102,8 +117,8 @@ sub refresh () {
}
if ($config{rcs}) {
IkiWiki::rcs_commit_staged(
- gettext("automatic index generation"),
- undef, undef);
+ message => gettext("automatic index generation"),
+ );
IkiWiki::enable_commit_hook();
}
}
diff --git a/IkiWiki/Plugin/blogspam.pm b/IkiWiki/Plugin/blogspam.pm
index 626c8ec42..8db3780e8 100644
--- a/IkiWiki/Plugin/blogspam.pm
+++ b/IkiWiki/Plugin/blogspam.pm
@@ -18,6 +18,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
blogspam_pagespec => {
type => 'pagespec',
@@ -57,6 +58,7 @@ sub checkconfig () {
sub checkcontent (@) {
my %params=@_;
+ my $session=$params{session};
if (exists $config{blogspam_pagespec}) {
return undef
@@ -87,7 +89,7 @@ sub checkcontent (@) {
push @options, "exclude=stopwords";
my %req=(
- ip => $ENV{REMOTE_ADDR},
+ ip => $session->remote_addr(),
comment => defined $params{diff} ? $params{diff} : $params{content},
subject => defined $params{subject} ? $params{subject} : "",
name => defined $params{author} ? $params{author} : "",
diff --git a/IkiWiki/Plugin/brokenlinks.pm b/IkiWiki/Plugin/brokenlinks.pm
index cf8f25281..8ee734bf9 100644
--- a/IkiWiki/Plugin/brokenlinks.pm
+++ b/IkiWiki/Plugin/brokenlinks.pm
@@ -23,36 +23,27 @@ sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
- # Needs to update whenever a page is added or removed, so
- # register a dependency.
- add_depends($params{page}, $params{pages});
-
- my %broken;
- foreach my $page (pagespec_match_list([keys %links],
- $params{pages}, location => $params{page})) {
- my $discussion=gettext("Discussion");
- my %seen;
- foreach my $link (@{$links{$page}}) {
- next if $seen{$link};
- $seen{$link}=1;
- next if $link =~ /.*\/\Q$discussion\E/i && $config{discussion};
- my $bestlink=bestlink($page, $link);
- next if length $bestlink;
- push @{$broken{$link}}, $page;
- }
- }
-
my @broken;
- foreach my $link (keys %broken) {
- my $page=$broken{$link}->[0];
+ foreach my $link (keys %IkiWiki::brokenlinks) {
+ next if $link =~ /.*\/\Q$config{discussionpage}\E/i && $config{discussion};
+
+ my @pages=pagespec_match_list($params{page}, $params{pages},
+ list => $IkiWiki::brokenlinks{$link},
+ # needs to update when links on a page change
+ deptype => deptype("links")
+ );
+ next unless @pages;
+
+ my $page=$IkiWiki::brokenlinks{$link}->[0];
push @broken, sprintf(gettext("%s from %s"),
htmllink($page, $params{destpage}, $link, noimageinline => 1),
join(", ", map {
htmllink($params{page}, $params{destpage}, $_, noimageinline => 1)
- } @{$broken{$link}}));
+ } @pages)
+ );
}
- return gettext("There are no broken links!") unless %broken;
+ return gettext("There are no broken links!") unless @broken;
return "<ul>\n"
.join("\n",
map {
diff --git a/IkiWiki/Plugin/bzr.pm b/IkiWiki/Plugin/bzr.pm
index 883007367..562d5d389 100644
--- a/IkiWiki/Plugin/bzr.pm
+++ b/IkiWiki/Plugin/bzr.pm
@@ -20,6 +20,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
}
sub checkconfig () {
@@ -36,6 +37,7 @@ sub getsetup () {
plugin => {
safe => 0, # rcs plugin
rebuild => undef,
+ section => "rcs",
},
bzr_wrapper => {
type => "string",
@@ -72,31 +74,40 @@ sub bzr_log ($) {
my @infos = ();
my $key = undef;
+ my %info;
while (<$out>) {
my $line = $_;
my ($value);
if ($line =~ /^message:/) {
$key = "message";
- $infos[$#infos]{$key} = "";
+ $info{$key} = "";
}
elsif ($line =~ /^(modified|added|renamed|renamed and modified|removed):/) {
$key = "files";
- unless (defined($infos[$#infos]{$key})) { $infos[$#infos]{$key} = ""; }
+ $info{$key} = "" unless defined $info{$key};
}
elsif (defined($key) and $line =~ /^ (.*)/) {
- $infos[$#infos]{$key} .= "$1\n";
+ $info{$key} .= "$1\n";
}
elsif ($line eq "------------------------------------------------------------\n") {
+ push @infos, {%info} if keys %info;
+ %info = ();
$key = undef;
- push (@infos, {});
}
- else {
+ elsif ($line =~ /: /) {
chomp $line;
+ if ($line =~ /^revno: (\d+)/) {
+ $key = "revno";
+ $value = $1;
+ }
+ else {
($key, $value) = split /: +/, $line, 2;
- $infos[$#infos]{$key} = $value;
- }
+ }
+ $info{$key} = $value;
+ }
}
close $out;
+ push @infos, {%info} if keys %info;
return @infos;
}
@@ -112,8 +123,13 @@ sub rcs_prepedit ($) {
return "";
}
-sub bzr_author ($$) {
- my ($user, $ipaddr) = @_;
+sub bzr_author ($) {
+ my $session=shift;
+
+ return unless defined $session;
+
+ my $user=$session->param("name");
+ my $ipaddr=$session->remote_addr();
if (defined $user) {
return IkiWiki::possibly_foolish_untaint($user);
@@ -126,18 +142,19 @@ sub bzr_author ($$) {
}
}
-sub rcs_commit ($$$;$$) {
- my ($file, $message, $rcstoken, $user, $ipaddr) = @_;
+sub rcs_commit (@) {
+ my %params=@_;
- $user = bzr_author($user, $ipaddr);
+ my $user=bzr_author($params{session});
- $message = IkiWiki::possibly_foolish_untaint($message);
- if (! length $message) {
- $message = "no message given";
+ $params{message} = IkiWiki::possibly_foolish_untaint($params{message});
+ if (! length $params{message}) {
+ $params{message} = "no message given";
}
- my @cmdline = ("bzr", "commit", "--quiet", "-m", $message, "--author", $user,
- $config{srcdir}."/".$file);
+ my @cmdline = ("bzr", "commit", "--quiet", "-m", $params{message},
+ (defined $user ? ("--author", $user) : ()),
+ $config{srcdir}."/".$params{file});
if (system(@cmdline) != 0) {
warn "'@cmdline' failed: $!";
}
@@ -145,19 +162,18 @@ sub rcs_commit ($$$;$$) {
return undef; # success
}
-sub rcs_commit_staged ($$$) {
- # Commits all staged changes. Changes can be staged using rcs_add,
- # rcs_remove, and rcs_rename.
- my ($message, $user, $ipaddr)=@_;
+sub rcs_commit_staged (@) {
+ my %params=@_;
- $user = bzr_author($user, $ipaddr);
+ my $user=bzr_author($params{session});
- $message = IkiWiki::possibly_foolish_untaint($message);
- if (! length $message) {
- $message = "no message given";
+ $params{message} = IkiWiki::possibly_foolish_untaint($params{message});
+ if (! length $params{message}) {
+ $params{message} = "no message given";
}
- my @cmdline = ("bzr", "commit", "--quiet", "-m", $message, "--author", $user,
+ my @cmdline = ("bzr", "commit", "--quiet", "-m", $params{message},
+ (defined $user ? ("--author", $user) : ()),
$config{srcdir});
if (system(@cmdline) != 0) {
warn "'@cmdline' failed: $!";
@@ -212,7 +228,7 @@ sub rcs_recentchanges ($) {
foreach my $info (bzr_log($out)) {
my @pages = ();
my @message = ();
-
+
foreach my $msgline (split(/\n/, $info->{message})) {
push @message, { line => $msgline };
}
@@ -275,14 +291,8 @@ sub rcs_diff ($) {
}
}
-sub rcs_getctime ($) {
- my ($file) = @_;
-
- # XXX filename passes through the shell here, should try to avoid
- # that just in case
- my @cmdline = ("bzr", "log", "--limit", '1', "$config{srcdir}/$file");
- open (my $out, "@cmdline |");
-
+sub extract_timestamp (@) {
+ open (my $out, "-|", @_);
my @log = bzr_log($out);
if (length @log < 1) {
@@ -292,8 +302,22 @@ sub rcs_getctime ($) {
eval q{use Date::Parse};
error($@) if $@;
- my $ctime = str2time($log[0]->{"timestamp"});
- return $ctime;
+ my $time = str2time($log[0]->{"timestamp"});
+ return $time;
+}
+
+sub rcs_getctime ($) {
+ my ($file) = @_;
+
+ my @cmdline = ("bzr", "log", "--forward", "--limit", '1', "$config{srcdir}/$file");
+ return extract_timestamp(@cmdline);
+}
+
+sub rcs_getmtime ($) {
+ my ($file) = @_;
+
+ my @cmdline = ("bzr", "log", "--limit", '1', "$config{srcdir}/$file");
+ return extract_timestamp(@cmdline);
}
1
diff --git a/IkiWiki/Plugin/calendar.pm b/IkiWiki/Plugin/calendar.pm
index fe4b16072..bb995d499 100644
--- a/IkiWiki/Plugin/calendar.pm
+++ b/IkiWiki/Plugin/calendar.pm
@@ -22,10 +22,8 @@ use warnings;
use strict;
use IkiWiki 3.00;
use Time::Local;
-use POSIX;
+use POSIX ();
-my %cache;
-my %linkcache;
my $time=time;
my @now=localtime($time);
@@ -40,6 +38,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
archivebase => {
type => "string",
@@ -48,6 +47,14 @@ sub getsetup () {
safe => 1,
rebuild => 1,
},
+ archive_pagespec => {
+ type => "pagespec",
+ example => "page(posts/*) and !*/Discussion",
+ description => "PageSpec of pages to include in the archives; used by ikiwiki-calendar command",
+ link => 'ikiwiki/PageSpec',
+ safe => 1,
+ rebuild => 0,
+ },
}
sub is_leap_year (@) {
@@ -67,28 +74,56 @@ sub month_days {
sub format_month (@) {
my %params=@_;
- my $pagespec = $params{pages};
- my $year = $params{year};
- my $month = $params{month};
- my $pmonth = $params{pmonth};
- my $nmonth = $params{nmonth};
- my $pyear = $params{pyear};
- my $nyear = $params{nyear};
+ my %linkcache;
+ foreach my $p (pagespec_match_list($params{page},
+ "creation_year($params{year}) and creation_month($params{month}) and ($params{pages})",
+ # add presence dependencies to update
+ # month calendar when pages are added/removed
+ deptype => deptype("presence"))) {
+ my $mtime = $IkiWiki::pagectime{$p};
+ my @date = localtime($mtime);
+ my $mday = $date[3];
+ my $month = $date[4] + 1;
+ my $year = $date[5] + 1900;
+ my $mtag = sprintf("%02d", $month);
+
+ # Only one posting per day is being linked to.
+ $linkcache{"$year/$mtag/$mday"} = $p;
+ }
+
+ my $pmonth = $params{month} - 1;
+ my $nmonth = $params{month} + 1;
+ my $pyear = $params{year};
+ my $nyear = $params{year};
+
+ # Adjust for January and December
+ if ($params{month} == 1) {
+ $pmonth = 12;
+ $pyear--;
+ }
+ if ($params{month} == 12) {
+ $nmonth = 1;
+ $nyear++;
+ }
+
+ # Add padding.
+ $pmonth=sprintf("%02d", $pmonth);
+ $nmonth=sprintf("%02d", $nmonth);
- my @list;
my $calendar="\n";
# When did this month start?
- my @monthstart = localtime(timelocal(0,0,0,1,$month-1,$year-1900));
+ my @monthstart = localtime(timelocal(0,0,0,1,$params{month}-1,$params{year}-1900));
my $future_dom = 0;
my $today = 0;
- if ($year == $now[5]+1900 && $month == $now[4]+1) {
+ if ($params{year} == $now[5]+1900 && $params{month} == $now[4]+1) {
$future_dom = $now[3]+1;
$today = $now[3];
}
# Find out month names for this, next, and previous months
+ my $monthabbrev=POSIX::strftime("%b", @monthstart);
my $monthname=POSIX::strftime("%B", @monthstart);
my $pmonthname=POSIX::strftime("%B", localtime(timelocal(0,0,0,1,$pmonth-1,$pyear-1900)));
my $nmonthname=POSIX::strftime("%B", localtime(timelocal(0,0,0,1,$nmonth-1,$nyear-1900)));
@@ -98,34 +133,43 @@ sub format_month (@) {
$archivebase = $params{archivebase} if defined $params{archivebase};
# Calculate URL's for monthly archives.
- my ($url, $purl, $nurl)=("$monthname",'','');
- if (exists $cache{$pagespec}{"$year/$month"}) {
+ my ($url, $purl, $nurl)=("$monthname $params{year}",'','');
+ if (exists $pagesources{"$archivebase/$params{year}/$params{month}"}) {
$url = htmllink($params{page}, $params{destpage},
- "$archivebase/$year/".sprintf("%02d", $month),
- linktext => " $monthname ");
+ "$archivebase/$params{year}/".$params{month},
+ noimageinline => 1,
+ linktext => "$monthabbrev $params{year}",
+ title => $monthname);
}
- add_depends($params{page}, "$archivebase/$year/".sprintf("%02d", $month));
- if (exists $cache{$pagespec}{"$pyear/$pmonth"}) {
+ add_depends($params{page}, "$archivebase/$params{year}/$params{month}",
+ deptype("presence"));
+ if (exists $pagesources{"$archivebase/$pyear/$pmonth"}) {
$purl = htmllink($params{page}, $params{destpage},
- "$archivebase/$pyear/" . sprintf("%02d", $pmonth),
- linktext => " $pmonthname ");
+ "$archivebase/$pyear/$pmonth",
+ noimageinline => 1,
+ linktext => "\&larr;",
+ title => $pmonthname);
}
- add_depends($params{page}, "$archivebase/$pyear/".sprintf("%02d", $pmonth));
- if (exists $cache{$pagespec}{"$nyear/$nmonth"}) {
+ add_depends($params{page}, "$archivebase/$pyear/$pmonth",
+ deptype("presence"));
+ if (exists $pagesources{"$archivebase/$nyear/$nmonth"}) {
$nurl = htmllink($params{page}, $params{destpage},
- "$archivebase/$nyear/" . sprintf("%02d", $nmonth),
- linktext => " $nmonthname ");
+ "$archivebase/$nyear/$nmonth",
+ noimageinline => 1,
+ linktext => "\&rarr;",
+ title => $nmonthname);
}
- add_depends($params{page}, "$archivebase/$nyear/".sprintf("%02d", $nmonth));
+ add_depends($params{page}, "$archivebase/$nyear/$nmonth",
+ deptype("presence"));
# Start producing the month calendar
$calendar=<<EOF;
<table class="month-calendar">
- <caption class="month-calendar-head">
- $purl
- $url
- $nurl
- </caption>
+ <tr>
+ <th class="month-calendar-arrow">$purl</th>
+ <th class="month-calendar-head" colspan="5">$url</th>
+ <th class="month-calendar-arrow">$nurl</th>
+ </tr>
<tr>
EOF
@@ -137,12 +181,12 @@ EOF
my %downame;
my %dowabbr;
for my $dow ($week_start_day..$week_start_day+6) {
- my @day=localtime(timelocal(0,0,0,$start_day++,$month-1,$year-1900));
+ my @day=localtime(timelocal(0,0,0,$start_day++,$params{month}-1,$params{year}-1900));
my $downame = POSIX::strftime("%A", @day);
- my $dowabbr = POSIX::strftime("%a", @day);
+ my $dowabbr = substr($downame, 0, 1);
$downame{$dow % 7}=$downame;
$dowabbr{$dow % 7}=$dowabbr;
- $calendar.= qq{\t\t<th class="month-calendar-day-head $downame">$dowabbr</th>\n};
+ $calendar.= qq{\t\t<th class="month-calendar-day-head $downame" title="$downame">$dowabbr</th>\n};
}
$calendar.=<<EOF;
@@ -158,9 +202,9 @@ EOF
# At this point, either the first is a week_start_day, in which case
# nothing has been printed, or else we are in the middle of a row.
- for (my $day = 1; $day <= month_days(year => $year, month => $month);
+ for (my $day = 1; $day <= month_days(year => $params{year}, month => $params{month});
$day++, $wday++, $wday %= 7) {
- # At tihs point, on a week_start_day, we close out a row,
+ # At this point, on a week_start_day, we close out a row,
# and start a new one -- unless it is week_start_day on the
# first, where we do not close a row -- since none was started.
if ($wday == $week_start_day) {
@@ -169,8 +213,8 @@ EOF
}
my $tag;
- my $mtag = sprintf("%02d", $month);
- if (defined $cache{$pagespec}{"$year/$mtag/$day"}) {
+ my $key="$params{year}/$params{month}/$day";
+ if (defined $linkcache{$key}) {
if ($day == $today) {
$tag='month-calendar-day-this-day';
}
@@ -179,9 +223,10 @@ EOF
}
$calendar.=qq{\t\t<td class="$tag $downame{$wday}">};
$calendar.=htmllink($params{page}, $params{destpage},
- pagename($linkcache{"$year/$mtag/$day"}),
- "linktext" => "$day");
- push @list, pagename($linkcache{"$year/$mtag/$day"});
+ $linkcache{$key},
+ noimageinline => 1,
+ linktext => $day,
+ title => pagetitle(IkiWiki::basename($linkcache{$key})));
$calendar.=qq{</td>\n};
}
else {
@@ -207,100 +252,108 @@ EOF
</table>
EOF
- # Add dependencies to update the calendar whenever pages
- # matching the pagespec are added or removed.
- add_depends($params{page}, $params{pages});
- # Explicitly add all currently linked pages as dependencies, so
- # that if they are removed, the calendar will be sure to be updated.
- add_depends($params{page}, join(" or ", @list));
-
return $calendar;
}
sub format_year (@) {
my %params=@_;
-
- my $pagespec = $params{pages};
- my $year = $params{year};
- my $month = $params{month};
- my $pmonth = $params{pmonth};
- my $nmonth = $params{nmonth};
- my $pyear = $params{pyear};
- my $nyear = $params{nyear};
-
+
+ my @post_months;
+ foreach my $p (pagespec_match_list($params{page},
+ "creation_year($params{year}) and ($params{pages})",
+ # add presence dependencies to update
+ # year calendar's links to months when
+ # pages are added/removed
+ deptype => deptype("presence"))) {
+ my $mtime = $IkiWiki::pagectime{$p};
+ my @date = localtime($mtime);
+ my $month = $date[4] + 1;
+
+ $post_months[$month]++;
+ }
+
my $calendar="\n";
+
+ my $pyear = $params{year} - 1;
+ my $nyear = $params{year} + 1;
+ my $thisyear = $now[5]+1900;
my $future_month = 0;
- $future_month = $now[4]+1 if ($year == $now[5]+1900);
+ $future_month = $now[4]+1 if $params{year} == $thisyear;
my $archivebase = 'archives';
$archivebase = $config{archivebase} if defined $config{archivebase};
$archivebase = $params{archivebase} if defined $params{archivebase};
# calculate URL's for previous and next years
- my ($url, $purl, $nurl)=("$year",'','');
- if (exists $cache{$pagespec}{"$year"}) {
+ my ($url, $purl, $nurl)=("$params{year}",'','');
+ if (exists $pagesources{"$archivebase/$params{year}"}) {
$url = htmllink($params{page}, $params{destpage},
- "$archivebase/$year",
- linktext => "$year");
+ "$archivebase/$params{year}",
+ noimageinline => 1,
+ linktext => $params{year},
+ title => $params{year});
}
- add_depends($params{page}, "$archivebase/$year");
- if (exists $cache{$pagespec}{"$pyear"}) {
+ add_depends($params{page}, "$archivebase/$params{year}", deptype("presence"));
+ if (exists $pagesources{"$archivebase/$pyear"}) {
$purl = htmllink($params{page}, $params{destpage},
"$archivebase/$pyear",
- linktext => "\&larr;");
+ noimageinline => 1,
+ linktext => "\&larr;",
+ title => $pyear);
}
- add_depends($params{page}, "$archivebase/$pyear");
- if (exists $cache{$pagespec}{"$nyear"}) {
+ add_depends($params{page}, "$archivebase/$pyear", deptype("presence"));
+ if (exists $pagesources{"$archivebase/$nyear"}) {
$nurl = htmllink($params{page}, $params{destpage},
"$archivebase/$nyear",
- linktext => "\&rarr;");
+ noimageinline => 1,
+ linktext => "\&rarr;",
+ title => $nyear);
}
- add_depends($params{page}, "$archivebase/$nyear");
+ add_depends($params{page}, "$archivebase/$nyear", deptype("presence"));
# Start producing the year calendar
+ my $m=$params{months_per_row}-2;
$calendar=<<EOF;
<table class="year-calendar">
- <caption class="year-calendar-head">
- $purl
- $url
- $nurl
- </caption>
+ <tr>
+ <th class="year-calendar-arrow">$purl</th>
+ <th class="year-calendar-head" colspan="$m">$url</th>
+ <th class="year-calendar-arrow">$nurl</th>
+ </tr>
<tr>
<th class="year-calendar-subhead" colspan="$params{months_per_row}">Months</th>
</tr>
EOF
- for ($month = 1; $month <= 12; $month++) {
- my @day=localtime(timelocal(0,0,0,15,$month-1,$year-1900));
+ for (my $month = 1; $month <= 12; $month++) {
+ my @day=localtime(timelocal(0,0,0,15,$month-1,$params{year}-1900));
my $murl;
my $monthname = POSIX::strftime("%B", @day);
my $monthabbr = POSIX::strftime("%b", @day);
$calendar.=qq{\t<tr>\n} if ($month % $params{months_per_row} == 1);
my $tag;
my $mtag=sprintf("%02d", $month);
- if ($month == $params{month}) {
- if ($cache{$pagespec}{"$year/$mtag"}) {
- $tag = 'this_month_link';
- }
- else {
- $tag = 'this_month_nolink';
- }
+ if ($month == $params{month} && $thisyear == $params{year}) {
+ $tag = 'year-calendar-this-month';
}
- elsif ($cache{$pagespec}{"$year/$mtag"}) {
- $tag = 'month_link';
+ elsif ($pagesources{"$archivebase/$params{year}/$mtag"}) {
+ $tag = 'year-calendar-month-link';
}
elsif ($future_month && $month >= $future_month) {
- $tag = 'month_future';
+ $tag = 'year-calendar-month-future';
}
else {
- $tag = 'month_nolink';
+ $tag = 'year-calendar-month-nolink';
}
- if ($cache{$pagespec}{"$year/$mtag"}) {
+ if ($pagesources{"$archivebase/$params{year}/$mtag"} &&
+ $post_months[$mtag]) {
$murl = htmllink($params{page}, $params{destpage},
- "$archivebase/$year/$mtag",
- linktext => "$monthabbr");
+ "$archivebase/$params{year}/$mtag",
+ noimageinline => 1,
+ linktext => $monthabbr,
+ title => $monthname);
$calendar.=qq{\t<td class="$tag">};
$calendar.=$murl;
$calendar.=qq{\t</td>\n};
@@ -308,7 +361,8 @@ EOF
else {
$calendar.=qq{\t<td class="$tag">$monthabbr</td>\n};
}
- add_depends($params{page}, "$archivebase/$year/$mtag");
+ add_depends($params{page}, "$archivebase/$params{year}/$mtag",
+ deptype("presence"));
$calendar.=qq{\t</tr>\n} if ($month % $params{months_per_row} == 0);
}
@@ -320,76 +374,99 @@ EOF
return $calendar;
}
+sub setnextchange ($$) {
+ my $page=shift;
+ my $timestamp=shift;
+
+ if (! exists $pagestate{$page}{calendar}{nextchange} ||
+ $pagestate{$page}{calendar}{nextchange} > $timestamp) {
+ $pagestate{$page}{calendar}{nextchange}=$timestamp;
+ }
+}
+
sub preprocess (@) {
my %params=@_;
+
+ my $thisyear=1900 + $now[5];
+ my $thismonth=1 + $now[4];
+
$params{pages} = "*" unless defined $params{pages};
$params{type} = "month" unless defined $params{type};
- $params{month} = sprintf("%02d", $params{month}) if defined $params{month};
$params{week_start_day} = 0 unless defined $params{week_start_day};
$params{months_per_row} = 3 unless defined $params{months_per_row};
+ $params{year} = $thisyear unless defined $params{year};
+ $params{month} = $thismonth unless defined $params{month};
- if (! defined $params{year} || ! defined $params{month}) {
- # Record that the calendar next changes at midnight.
- $pagestate{$params{destpage}}{calendar}{nextchange}=($time
+ my $relativeyear=0;
+ if ($params{year} < 1) {
+ $relativeyear=1;
+ $params{year}=$thisyear+$params{year};
+ }
+ my $relativemonth=0;
+ if ($params{month} < 1) {
+ $relativemonth=1;
+ my $monthoff=$params{month};
+ $params{month}=($thismonth+$monthoff) % 12;
+ $params{month}=12 if $params{month}==0;
+ my $yearoff=POSIX::ceil(($thismonth-$params{month}) / -12)
+ - int($monthoff / 12);
+ $params{year}-=$yearoff;
+ }
+
+ $params{month} = sprintf("%02d", $params{month});
+
+ if ($params{type} eq 'month' && $params{year} == $thisyear
+ && $params{month} == $thismonth) {
+ # calendar for current month, updates next midnight
+ setnextchange($params{destpage}, ($time
+ (60 - $now[0]) # seconds
+ (59 - $now[1]) * 60 # minutes
+ (23 - $now[2]) * 60 * 60 # hours
- );
-
- $params{year} = 1900 + $now[5] unless defined $params{year};
- $params{month} = 1 + $now[4] unless defined $params{month};
+ ));
}
- else {
- delete $pagestate{$params{destpage}}{calendar};
+ elsif ($params{type} eq 'month' &&
+ (($params{year} == $thisyear && $params{month} > $thismonth) ||
+ $params{year} > $thisyear)) {
+ # calendar for upcoming month, updates 1st of that month
+ setnextchange($params{destpage},
+ timelocal(0, 0, 0, 1, $params{month}-1, $params{year}));
}
-
- # Calculate month names for next month, and previous months
- my $pmonth = $params{month} - 1;
- my $nmonth = $params{month} + 1;
- my $pyear = $params{year} - 1;
- my $nyear = $params{year} + 1;
-
- # Adjust for January and December
- if ($params{month} == 1) {
- $pmonth = 12;
- $pyear--;
+ elsif (($params{type} eq 'year' && $params{year} == $thisyear) ||
+ $relativemonth) {
+ # Calendar for current year updates 1st of next month.
+ # Any calendar relative to the current month also updates
+ # then.
+ if ($thismonth < 12) {
+ setnextchange($params{destpage},
+ timelocal(0, 0, 0, 1, $thismonth+1-1, $params{year}));
+ }
+ else {
+ setnextchange($params{destpage},
+ timelocal(0, 0, 0, 1, 1-1, $params{year}+1));
+ }
}
- if ($params{month} == 12) {
- $nmonth = 1;
- $nyear++;
+ elsif ($relativeyear) {
+ # Any calendar relative to the current year updates 1st
+ # of next year.
+ setnextchange($params{destpage},
+ timelocal(0, 0, 0, 1, 1-1, $thisyear+1));
}
-
- $params{pmonth}=$pmonth;
- $params{nmonth}=$nmonth;
- $params{pyear} =$pyear;
- $params{nyear} =$nyear;
-
- my $calendar="\n";
- my $pagespec=$params{pages};
- my $page =$params{page};
-
- if (! defined $cache{$pagespec}) {
- foreach my $p (pagespec_match_list([keys %pagesources], $pagespec)) {
- my $mtime = $IkiWiki::pagectime{$p};
- my $src = $pagesources{$p};
- my @date = localtime($mtime);
- my $mday = $date[3];
- my $month = $date[4] + 1;
- my $year = $date[5] + 1900;
- my $mtag = sprintf("%02d", $month);
-
- # Only one posting per day is being linked to.
- $linkcache{"$year/$mtag/$mday"} = "$src";
- $cache{$pagespec}{"$year"}++;
- $cache{$pagespec}{"$year/$mtag"}++;
- $cache{$pagespec}{"$year/$mtag/$mday"}++;
- }
+ elsif ($params{type} eq 'year' && $params{year} > $thisyear) {
+ # calendar for upcoming year, updates 1st of that year
+ setnextchange($params{destpage},
+ timelocal(0, 0, 0, 1, 1-1, $params{year}));
+ }
+ else {
+ # calendar for past month or year, does not need
+ # to update any more
+ delete $pagestate{$params{destpage}}{calendar};
}
- if ($params{type} =~ /month/i) {
+ my $calendar="";
+ if ($params{type} eq 'month') {
$calendar=format_month(%params);
}
- elsif ($params{type} =~ /year/i) {
+ elsif ($params{type} eq 'year') {
$calendar=format_year(%params);
}
diff --git a/IkiWiki/Plugin/color.pm b/IkiWiki/Plugin/color.pm
index 20505893b..9bb2359ce 100644
--- a/IkiWiki/Plugin/color.pm
+++ b/IkiWiki/Plugin/color.pm
@@ -10,6 +10,16 @@ use IkiWiki 3.00;
sub import {
hook(type => "preprocess", id => "color", call => \&preprocess);
hook(type => "format", id => "color", call => \&format);
+ hook(type => "getsetup", id => "color", call => \&getsetup);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ section => "widget",
+ },
}
sub preserve_style ($$$) {
@@ -51,12 +61,11 @@ sub replace_preserved_style ($) {
sub preprocess (@) {
my %params = @_;
- # Preprocess the text to expand any preprocessor directives
- # embedded inside it.
- $params{text} = IkiWiki::preprocess($params{page}, $params{destpage},
- IkiWiki::filter($params{page}, $params{destpage}, $params{text}));
-
- return preserve_style($params{foreground}, $params{background}, $params{text});
+ return preserve_style($params{foreground}, $params{background},
+ # Preprocess the text to expand any preprocessor directives
+ # embedded inside it.
+ IkiWiki::preprocess($params{page}, $params{destpage},
+ $params{text}));
}
sub format (@) {
diff --git a/IkiWiki/Plugin/comments.pm b/IkiWiki/Plugin/comments.pm
index 517e16f9f..851f4862e 100644
--- a/IkiWiki/Plugin/comments.pm
+++ b/IkiWiki/Plugin/comments.pm
@@ -22,12 +22,16 @@ sub import {
hook(type => "checkconfig", id => 'comments', call => \&checkconfig);
hook(type => "getsetup", id => 'comments', call => \&getsetup);
hook(type => "preprocess", id => 'comment', call => \&preprocess);
+ hook(type => "preprocess", id => 'commentmoderation', call => \&preprocess_moderation);
# here for backwards compatability with old comments
hook(type => "preprocess", id => '_comment', call => \&preprocess);
hook(type => "sessioncgi", id => 'comment', call => \&sessioncgi);
hook(type => "htmlize", id => "_comment", call => \&htmlize);
+ hook(type => "htmlize", id => "_comment_pending",
+ call => \&htmlize_pending);
hook(type => "pagetemplate", id => "comments", call => \&pagetemplate);
- hook(type => "formbuilder_setup", id => "comments", call => \&formbuilder_setup);
+ hook(type => "formbuilder_setup", id => "comments",
+ call => \&formbuilder_setup);
# Load goto to fix up user page links for logged-in commenters
IkiWiki::loadplugin("goto");
IkiWiki::loadplugin("inline");
@@ -38,6 +42,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1,
+ section => "web",
},
comments_pagespec => {
type => 'pagespec',
@@ -103,6 +108,14 @@ sub htmlize {
return $params{content};
}
+sub htmlize_pending {
+ my %params = @_;
+ return sprintf(gettext("this comment needs %s"),
+ '<a href="'.
+ IkiWiki::cgiurl(do => "commentmoderation").'">'.
+ gettext("moderation").'</a>');
+}
+
# FIXME: copied verbatim from meta
sub safeurl ($) {
my $url=shift;
@@ -130,8 +143,6 @@ sub preprocess {
}
$content =~ s/\\"/"/g;
- $content = IkiWiki::filter($page, $params{destpage}, $content);
-
if ($config{comments_allowdirectives}) {
$content = IkiWiki::preprocess($page, $params{destpage},
$content);
@@ -165,15 +176,14 @@ sub preprocess {
if (defined $oiduser) {
# looks like an OpenID
$commentauthorurl = $commentuser;
- $commentauthor = $oiduser;
+ $commentauthor = (defined $params{nickname} && length $params{nickname}) ? $params{nickname} : $oiduser;
$commentopenid = $commentuser;
}
else {
$commentauthorurl = IkiWiki::cgiurl(
do => 'goto',
- page => (length $config{userdir}
- ? "$config{userdir}/$commentuser"
- : "$commentuser"));
+ page => IkiWiki::userpage($commentuser)
+ );
$commentauthor = $commentuser;
}
@@ -221,10 +231,12 @@ sub preprocess {
}
if (defined $params{subject}) {
- $pagestate{$page}{meta}{title} = $params{subject};
+ # decode title the same way meta does
+ eval q{use HTML::Entities};
+ $pagestate{$page}{meta}{title} = decode_entities($params{subject});
}
- if ($params{page} =~ m/\/(\Q$config{comments_pagename}\E\d+)$/) {
+ if ($params{page} =~ m/\/\Q$config{comments_pagename}\E\d+_/) {
$pagestate{$page}{meta}{permalink} = urlto(IkiWiki::dirname($params{page}), undef, 1).
"#".page_to_id($params{page});
}
@@ -238,6 +250,22 @@ sub preprocess {
return $content;
}
+sub preprocess_moderation {
+ my %params = @_;
+
+ $params{desc}=gettext("Comment Moderation")
+ unless defined $params{desc};
+
+ if (length $config{cgiurl}) {
+ return '<a href="'.
+ IkiWiki::cgiurl(do => 'commentmoderation').
+ '">'.$params{desc}.'</a>';
+ }
+ else {
+ return $params{desc};
+ }
+}
+
sub sessioncgi ($$) {
my $cgi=shift;
my $session=shift;
@@ -249,6 +277,10 @@ sub sessioncgi ($$) {
elsif ($do eq 'commentmoderation') {
commentmoderation($cgi, $session);
}
+ elsif ($do eq 'commentsignin') {
+ IkiWiki::cgi_signin($cgi, $session);
+ exit;
+ }
}
# Mostly cargo-culted from IkiWiki::plugin::editpage
@@ -272,7 +304,7 @@ sub editcomment ($$) {
action => $config{cgiurl},
header => 0,
table => 0,
- template => scalar IkiWiki::template_params('editcomment.tmpl'),
+ template => { template('editcomment.tmpl') },
);
IkiWiki::decode_form_utf8($form);
@@ -326,7 +358,7 @@ sub editcomment ($$) {
if (! defined $session->param('name')) {
# Make signinurl work and return here.
- $form->tmpl_param(signinurl => IkiWiki::cgiurl(do => 'signin'));
+ $form->tmpl_param(signinurl => IkiWiki::cgiurl(do => 'commentsignin'));
$session->param(postsignin => $ENV{QUERY_STRING});
IkiWiki::cgi_savesession($session);
}
@@ -336,7 +368,7 @@ sub editcomment ($$) {
my $page = $form->field('page');
$page = IkiWiki::possibly_foolish_untaint($page);
if (! defined $page || ! length $page ||
- IkiWiki::file_pruned($page, $config{srcdir})) {
+ IkiWiki::file_pruned($page)) {
error(gettext("bad page name"));
}
@@ -377,18 +409,20 @@ sub editcomment ($$) {
IkiWiki::check_canedit($page, $cgi, $session);
$postcomment=0;
- my $location=unique_comment_location($page, $config{srcdir});
-
my $content = "[[!comment format=$type\n";
- # FIXME: handling of double quotes probably wrong?
if (defined $session->param('name')) {
my $username = $session->param('name');
$username =~ s/"/&quot;/g;
$content .= " username=\"$username\"\n";
}
- elsif (defined $ENV{REMOTE_ADDR}) {
- my $ip = $ENV{REMOTE_ADDR};
+ if (defined $session->param('nickname')) {
+ my $nickname = $session->param('nickname');
+ $nickname =~ s/"/&quot;/g;
+ $content .= " nickname=\"$nickname\"\n";
+ }
+ elsif (defined $session->remote_addr()) {
+ my $ip = $session->remote_addr();
if ($ip =~ m/^([.0-9]+)$/) {
$content .= " ip=\"$1\"\n";
}
@@ -410,17 +444,23 @@ sub editcomment ($$) {
my $subject = $form->field('subject');
if (defined $subject && length $subject) {
$subject =~ s/"/&quot;/g;
- $content .= " subject=\"$subject\"\n";
}
+ else {
+ $subject = "comment ".(num_comments($page, $config{srcdir}) + 1);
+ }
+ $content .= " subject=\"$subject\"\n";
$content .= " date=\"" . decode_utf8(strftime('%Y-%m-%dT%H:%M:%SZ', gmtime)) . "\"\n";
- my $editcontent = $form->field('editcontent') || '';
+ my $editcontent = $form->field('editcontent');
+ $editcontent="" if ! defined $editcontent;
$editcontent =~ s/\r\n/\n/g;
$editcontent =~ s/\r/\n/g;
$editcontent =~ s/"/\\"/g;
$content .= " content=\"\"\"\n$editcontent\n\"\"\"]]\n";
+ my $location=unique_comment_location($page, $content, $config{srcdir});
+
# This is essentially a simplified version of editpage:
# - the user does not control the page that's created, only the parent
# - it's always a create operation, never an edit
@@ -457,9 +497,15 @@ sub editcomment ($$) {
$postcomment=0;
if (! $ok) {
- my $penddir=$config{wikistatedir}."/comments_pending";
- $location=unique_comment_location($page, $penddir);
- writefile("$location._comment", $penddir, $content);
+ $location=unique_comment_location($page, $content, $config{srcdir}, "._comment_pending");
+ writefile("$location._comment_pending", $config{srcdir}, $content);
+
+ # Refresh so anything that deals with pending
+ # comments can be updated.
+ require IkiWiki::Render;
+ IkiWiki::refresh();
+ IkiWiki::saveindex();
+
IkiWiki::printheader($session);
print IkiWiki::misctemplate(gettext(gettext("comment stored for moderation")),
"<p>".
@@ -486,8 +532,10 @@ sub editcomment ($$) {
IkiWiki::rcs_add($file);
IkiWiki::disable_commit_hook();
- $conflict = IkiWiki::rcs_commit_staged($message,
- $session->param('name'), $ENV{REMOTE_ADDR});
+ $conflict = IkiWiki::rcs_commit_staged(
+ message => $message,
+ session => $session,
+ );
IkiWiki::enable_commit_hook();
IkiWiki::rcs_update();
}
@@ -510,7 +558,7 @@ sub editcomment ($$) {
}
else {
IkiWiki::showform ($form, \@buttons, $session, $cgi,
- forcebaseurl => $baseurl);
+ forcebaseurl => $baseurl, page => $page);
}
exit;
@@ -535,26 +583,30 @@ sub commentmoderation ($$) {
my %vars=$cgi->Vars;
my $added=0;
foreach my $id (keys %vars) {
- if ($id =~ /(.*)\Q._comment\E$/) {
+ if ($id =~ /(.*)\._comment(?:_pending)?$/) {
+ $id=decode_utf8($id);
my $action=$cgi->param($id);
next if $action eq 'Defer' && ! $rejectalldefer;
# Make sure that the id is of a legal
- # pending comment before untainting.
- my ($f)= $id =~ /$config{wiki_file_regexp}/;
+ # pending comment.
+ my ($f) = $id =~ /$config{wiki_file_regexp}/;
if (! defined $f || ! length $f ||
- IkiWiki::file_pruned($f, $config{srcdir})) {
+ IkiWiki::file_pruned($f)) {
error("illegal file");
}
- my $page=IkiWiki::possibly_foolish_untaint(IkiWiki::dirname($1));
- my $file="$config{wikistatedir}/comments_pending/".
- IkiWiki::possibly_foolish_untaint($id);
+ my $page=IkiWiki::dirname($f);
+ my $file="$config{srcdir}/$f";
+ if (! -e $file) {
+ # old location
+ $file="$config{wikistatedir}/comments_pending/".$f;
+ }
if ($action eq 'Accept') {
my $content=eval { readfile($file) };
next if $@; # file vanished since form was displayed
- my $dest=unique_comment_location($page, $config{srcdir})."._comment";
+ my $dest=unique_comment_location($page, $content, $config{srcdir})."._comment";
writefile($dest, $config{srcdir}, $content);
if ($config{rcs} and $config{comments_commit}) {
IkiWiki::rcs_add($dest);
@@ -562,9 +614,6 @@ sub commentmoderation ($$) {
$added++;
}
- # This removes empty subdirs, so the
- # .ikiwiki/comments_pending dir will
- # go away when all are moderated.
require IkiWiki::Render;
IkiWiki::prune($file);
}
@@ -575,8 +624,10 @@ sub commentmoderation ($$) {
if ($config{rcs} and $config{comments_commit}) {
my $message = gettext("Comment moderation");
IkiWiki::disable_commit_hook();
- $conflict=IkiWiki::rcs_commit_staged($message,
- $session->param('name'), $ENV{REMOTE_ADDR});
+ $conflict=IkiWiki::rcs_commit_staged(
+ message => $message,
+ session => $session,
+ );
IkiWiki::enable_commit_hook();
IkiWiki::rcs_update();
}
@@ -591,16 +642,15 @@ sub commentmoderation ($$) {
}
my @comments=map {
- my ($id, $ctime)=@{$_};
- my $file="$config{wikistatedir}/comments_pending/$id";
- my $content=readfile($file);
+ my ($id, $dir, $ctime)=@{$_};
+ my $content=readfile("$dir/$id");
my $preview=previewcomment($content, $id,
- IkiWiki::dirname($_), $ctime);
+ $id, $ctime);
{
id => $id,
view => $preview,
- }
- } sort { $b->[1] <=> $a->[1] } comments_pending();
+ }
+ } sort { $b->[2] <=> $a->[2] } comments_pending();
my $template=template("commentmoderation.tmpl");
$template->param(
@@ -630,30 +680,43 @@ sub formbuilder_setup (@) {
}
sub comments_pending () {
- my $dir="$config{wikistatedir}/comments_pending/";
- return unless -d $dir;
-
my @ret;
+
eval q{use File::Find};
error($@) if $@;
- find({
- no_chdir => 1,
- wanted => sub {
- $_=decode_utf8($_);
- if (IkiWiki::file_pruned($_, $dir)) {
- $File::Find::prune=1;
- }
- elsif (! -l $_ && ! -d _) {
- $File::Find::prune=0;
- my ($f)=/$config{wiki_file_regexp}/; # untaint
- if (defined $f && $f =~ /\Q._comment\E$/) {
- my $ctime=(stat($f))[10];
- $f=~s/^\Q$dir\E\/?//;
- push @ret, [$f, $ctime];
+ eval q{use Cwd};
+ error($@) if $@;
+ my $origdir=getcwd();
+
+ my $find_comments=sub {
+ my $dir=shift;
+ my $extension=shift;
+ return unless -d $dir;
+
+ chdir($dir) || die "chdir $dir: $!";
+
+ find({
+ no_chdir => 1,
+ wanted => sub {
+ my $file=decode_utf8($_);
+ $file=~s/^\.\///;
+ return if ! length $file || IkiWiki::file_pruned($file)
+ || -l $_ || -d _ || $file !~ /\Q$extension\E$/;
+ my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint
+ if (defined $f) {
+ my $ctime=(stat($_))[10];
+ push @ret, [$f, $dir, $ctime];
}
}
- }
- }, $dir);
+ }, ".");
+
+ chdir($origdir) || die "chdir $origdir: $!";
+ };
+
+ $find_comments->($config{srcdir}, "._comment_pending");
+ # old location
+ $find_comments->("$config{wikistatedir}/comments_pending/",
+ "._comment");
return @ret;
}
@@ -671,7 +734,8 @@ sub previewcomment ($$$) {
my $template = template("comment.tmpl");
$template->param(content => $preview);
- $template->param(ctime => displaytime($time));
+ $template->param(ctime => displaytime($time, undef, 1));
+ $template->param(html5 => $config{html5});
IkiWiki::run_hooks(pagetemplate => sub {
shift->(page => $location,
@@ -687,7 +751,7 @@ sub previewcomment ($$$) {
sub commentsshown ($) {
my $page=shift;
- return ! pagespec_match($page, "internal(*/$config{comments_pagename}*)",
+ return ! pagespec_match($page, "comment(*)",
location => $page) &&
pagespec_match($page, $config{comments_pagespec},
location => $page);
@@ -717,7 +781,7 @@ sub pagetemplate (@) {
my $comments = undef;
if ($shown) {
$comments = IkiWiki::preprocess_inline(
- pages => "internal($page/$config{comments_pagename}*)",
+ pages => "comment($page)",
template => 'comment',
show => 0,
reverse => 'yes',
@@ -733,39 +797,43 @@ sub pagetemplate (@) {
}
if ($shown && commentsopen($page)) {
- my $addcommenturl = IkiWiki::cgiurl(do => 'comment',
- page => $page);
- $template->param(addcommenturl => $addcommenturl);
+ $template->param(addcommenturl => addcommenturl($page));
}
}
- if ($template->query(name => 'commentsurl')) {
- if ($shown) {
+ if ($shown) {
+ if ($template->query(name => 'commentsurl')) {
$template->param(commentsurl =>
urlto($page, undef, 1).'#comments');
}
- }
- if ($template->query(name => 'atomcommentsurl') && $config{usedirs}) {
- if ($shown) {
+ if ($template->query(name => 'atomcommentsurl') && $config{usedirs}) {
# This will 404 until there are some comments, but I
# think that's probably OK...
$template->param(atomcommentsurl =>
urlto($page, undef, 1).'comments.atom');
}
- }
- if ($template->query(name => 'commentslink')) {
- # XXX Would be nice to say how many comments there are in
- # the link. But, to update the number, blog pages
- # would have to update whenever comments of any inlines
- # page are added, which is not currently done.
- if ($shown) {
- $template->param(commentslink =>
- htmllink($page, $params{destpage}, $page,
- linktext => gettext("Comments"),
+ if ($template->query(name => 'commentslink')) {
+ my $num=num_comments($page, $config{srcdir});
+ my $link;
+ if ($num > 0) {
+ $link = htmllink($page, $params{destpage}, $page,
+ linktext => sprintf(ngettext("%i comment", "%i comments", $num), $num),
anchor => "comments",
- noimageinline => 1));
+ noimageinline => 1
+ );
+ }
+ elsif (commentsopen($page)) {
+ $link = "<a href=\"".addcommenturl($page)."\">".
+ #translators: Here "Comment" is a verb;
+ #translators: the user clicks on it to
+ #translators: post a comment.
+ gettext("Comment").
+ "</a>";
+ }
+ $template->param(commentslink => $link)
+ if defined $link;
}
}
@@ -813,30 +881,48 @@ sub pagetemplate (@) {
}
}
-sub unique_comment_location ($) {
+sub addcommenturl ($) {
+ my $page=shift;
+
+ return IkiWiki::cgiurl(do => 'comment', page => $page);
+}
+
+sub num_comments ($$) {
my $page=shift;
my $dir=shift;
+ my @comments=glob("$dir/$page/$config{comments_pagename}*._comment");
+ return int @comments;
+}
+
+sub unique_comment_location ($$$$) {
+ my $page=shift;
+ eval q{use Digest::MD5 'md5_hex'};
+ error($@) if $@;
+ my $content_md5=md5_hex(Encode::encode_utf8(shift));
+ my $dir=shift;
+ my $ext=shift || "._comment";
+
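+ # Embed an md5 of the comment text in the location, so that
+ # distinct comments posted at about the same time get distinct
+ # filenames instead of racing for the same name.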
my $location;
- my $i = 0;
+ my $i = num_comments($page, $dir);
do {
$i++;
- $location = "$page/$config{comments_pagename}$i";
- } while (-e "$dir/$location._comment");
+ $location = "$page/$config{comments_pagename}${i}_${content_md5}";
+ } while (-e "$dir/$location$ext");
return $location;
}
sub page_to_id ($) {
# Converts a comment page name into a unique, legal html id
- # addtibute value, that can be used as an anchor to link to the
+ # attribute value, that can be used as an anchor to link to the
# comment.
my $page=shift;
eval q{use Digest::MD5 'md5_hex'};
error($@) if $@;
- return "comment-".md5_hex($page);
+ return "comment-".md5_hex(Encode::encode_utf8(($page)));
}
package IkiWiki::PageSpec;
@@ -848,7 +934,39 @@ sub match_postcomment ($$;@) {
if (! $postcomment) {
return IkiWiki::FailReason->new("not posting a comment");
}
- return match_glob($page, $glob);
+ return match_glob($page, $glob, @_);
+}
+
+sub match_comment ($$;@) {
+ my $page = shift;
+ my $glob = shift;
+
+ # To see if it's a comment, check the source file type.
+ # Deal with comments that were just deleted.
+ my $source=exists $IkiWiki::pagesources{$page} ?
+ $IkiWiki::pagesources{$page} :
+ $IkiWiki::delpagesources{$page};
+ my $type=defined $source ? IkiWiki::pagetype($source) : undef;
+ if (! defined $type || $type ne "_comment") {
+ return IkiWiki::FailReason->new("$page is not a comment");
+ }
+
+ return match_glob($page, "$glob/*", internal => 1, @_);
+}
+
+sub match_comment_pending ($$;@) {
+ my $page = shift;
+ my $glob = shift;
+
+ my $source=exists $IkiWiki::pagesources{$page} ?
+ $IkiWiki::pagesources{$page} :
+ $IkiWiki::delpagesources{$page};
+ my $type=defined $source ? IkiWiki::pagetype($source) : undef;
+ if (! defined $type || $type ne "_comment_pending") {
+ return IkiWiki::FailReason->new("$page is not a pending comment");
+ }
+
+ return match_glob($page, "$glob/*", internal => 1, @_);
}
1
diff --git a/IkiWiki/Plugin/conditional.pm b/IkiWiki/Plugin/conditional.pm
index 7445dbdad..026078b3c 100644
--- a/IkiWiki/Plugin/conditional.pm
+++ b/IkiWiki/Plugin/conditional.pm
@@ -16,6 +16,7 @@ sub getsetup {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -29,11 +30,10 @@ sub preprocess_if (@) {
}
my $result=0;
- if ((exists $params{all} && lc $params{all} eq "no") ||
- # An optimisation to avoid needless looping over every page
- # and adding of dependencies for simple uses of some of the
- # tests.
- $params{test} =~ /^([\s\!()]*((enabled|sourcepage|destpage|included)\([^)]*\)|(and|or))[\s\!()]*)+$/) {
+ if ((exists $params{all} && ! IkiWiki::yesno($params{all})) ||
+ # An optimisation to avoid needless looping over every page
+ # for simple uses of some of the tests.
+ $params{test} =~ /^([\s\!()]*((enabled|sourcepage|destpage|included)\([^)]*\)|(and|or))[\s\!()]*)+$/) {
add_depends($params{page}, "($params{test}) and $params{page}");
$result=pagespec_match($params{page}, $params{test},
location => $params{page},
@@ -41,17 +41,12 @@ sub preprocess_if (@) {
destpage => $params{destpage});
}
else {
- add_depends($params{page}, $params{test});
-
- foreach my $page (keys %pagesources) {
- if (pagespec_match($page, $params{test},
- location => $params{page},
- sourcepage => $params{page},
- destpage => $params{destpage})) {
- $result=1;
- last;
- }
- }
+ $result=pagespec_match_list($params{page}, $params{test},
+ # stop after first match
+ num => 1,
+ sourcepage => $params{page},
+ destpage => $params{destpage},
+ );
}
my $ret;
@@ -64,8 +59,7 @@ sub preprocess_if (@) {
else {
$ret="";
}
- return IkiWiki::preprocess($params{page}, $params{destpage},
- IkiWiki::filter($params{page}, $params{destpage}, $ret));
+ return IkiWiki::preprocess($params{page}, $params{destpage}, $ret);
}
package IkiWiki::PageSpec;
diff --git a/IkiWiki/Plugin/creole.pm b/IkiWiki/Plugin/creole.pm
index 425e71043..a1e4b31d3 100644
--- a/IkiWiki/Plugin/creole.pm
+++ b/IkiWiki/Plugin/creole.pm
@@ -17,6 +17,7 @@ sub getsetup {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
}
diff --git a/IkiWiki/Plugin/cutpaste.pm b/IkiWiki/Plugin/cutpaste.pm
index 417442f34..4a8817168 100644
--- a/IkiWiki/Plugin/cutpaste.pm
+++ b/IkiWiki/Plugin/cutpaste.pm
@@ -19,6 +19,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -49,8 +50,8 @@ sub preprocess_copy (@) {
$savedtext{$params{page}} = {} if not exists $savedtext{$params{"page"}};
$savedtext{$params{page}}->{$params{id}} = $params{text};
- return IkiWiki::preprocess($params{page}, $params{destpage},
- IkiWiki::filter($params{page}, $params{destpage}, $params{text})) if defined wantarray;
+ return IkiWiki::preprocess($params{page}, $params{destpage}, $params{text})
+ if defined wantarray;
}
sub preprocess_paste (@) {
@@ -69,8 +70,8 @@ sub preprocess_paste (@) {
error sprintf(gettext('no text was copied in this page with id %s'), $params{id});
}
- return IkiWiki::preprocess($params{page}, $params{destpage},
- IkiWiki::filter($params{page}, $params{destpage}, $savedtext{$params{page}}->{$params{id}}));
+ return IkiWiki::preprocess($params{page}, $params{destpage},
+ $savedtext{$params{page}}->{$params{id}});
}
1;
diff --git a/IkiWiki/Plugin/cvs.pm b/IkiWiki/Plugin/cvs.pm
new file mode 100644
index 000000000..4972efb58
--- /dev/null
+++ b/IkiWiki/Plugin/cvs.pm
@@ -0,0 +1,495 @@
+#!/usr/bin/perl
+package IkiWiki::Plugin::cvs;
+
+# Copyright (c) 2009 Amitai Schlair
+# All rights reserved.
+#
+# This code is derived from software contributed to ikiwiki
+# by Amitai Schlair.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY IKIWIKI AND CONTRIBUTORS ``AS IS''
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION
+# OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+# SUCH DAMAGE.
+
+use warnings;
+use strict;
+use IkiWiki;
+
+use File::chdir;
+
+sub import {
+ hook(type => "genwrapper", id => "cvs", call => \&genwrapper);
+ hook(type => "checkconfig", id => "cvs", call => \&checkconfig);
+ hook(type => "getsetup", id => "cvs", call => \&getsetup);
+ hook(type => "rcs", id => "rcs_update", call => \&rcs_update);
+ hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit);
+ hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit);
+ hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged);
+ hook(type => "rcs", id => "rcs_add", call => \&rcs_add);
+ hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove);
+ hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename);
+ hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
+ hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
+ hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
+}
+
+sub genwrapper () {
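+ # C fragment spliced into the generated wrapper: when cvs runs
+ # the hook for a "New directory" commit there is nothing to
+ # update, so bail out before the wrapper does any work.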
+ return <<EOF;
+ {
+ int j;
+ for (j = 1; j < argc; j++)
+ if (strstr(argv[j], "New directory") != NULL)
+ exit(0);
+ }
+EOF
+}
+
+sub checkconfig () {
+ if (! defined $config{cvspath}) {
+ $config{cvspath}="ikiwiki";
+ }
+ if (exists $config{cvspath}) {
+ # code depends on the path not having extraneous slashes
+ $config{cvspath}=~tr#/#/#s;
+ $config{cvspath}=~s/\/$//;
+ $config{cvspath}=~s/^\///;
+ }
+ if (defined $config{cvs_wrapper} && length $config{cvs_wrapper}) {
+ push @{$config{wrappers}}, {
+ wrapper => $config{cvs_wrapper},
+ wrappermode => (defined $config{cvs_wrappermode} ? $config{cvs_wrappermode} : "04755"),
+ };
+ }
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 0, # rcs plugin
+ rebuild => undef,
+ section => "rcs",
+ },
+ cvsrepo => {
+ type => "string",
+ example => "/cvs/wikirepo",
+ description => "cvs repository location",
+ safe => 0, # path
+ rebuild => 0,
+ },
+ cvspath => {
+ type => "string",
+ example => "ikiwiki",
+ description => "path inside repository where the wiki is located",
+ safe => 0, # paranoia
+ rebuild => 0,
+ },
+ cvs_wrapper => {
+ type => "string",
+ example => "/cvs/wikirepo/CVSROOT/post-commit",
+ description => "cvs post-commit hook to generate (triggered by CVSROOT/loginfo entry)",
+ safe => 0, # file
+ rebuild => 0,
+ },
+ cvs_wrappermode => {
+ type => "string",
+ example => '04755',
+ description => "mode for cvs_wrapper (can safely be made suid)",
+ safe => 0,
+ rebuild => 0,
+ },
+ historyurl => {
+ type => "string",
+ example => "http://cvs.example.org/cvsweb.cgi/ikiwiki/[[file]]",
+ description => "cvsweb url to show file history ([[file]] substituted)",
+ safe => 1,
+ rebuild => 1,
+ },
+ diffurl => {
+ type => "string",
+ example => "http://cvs.example.org/cvsweb.cgi/ikiwiki/[[file]].diff?r1=text&amp;tr1=[[r1]]&amp;r2=text&amp;tr2=[[r2]]",
+ description => "cvsweb url to show a diff ([[file]], [[r1]], and [[r2]] substituted)",
+ safe => 1,
+ rebuild => 1,
+ },
+}
+
+sub cvs_info ($$) {
+ my $field=shift;
+ my $file=shift;
+
+ local $CWD = $config{srcdir};
+
+ my $info=`cvs status $file`;
+ my ($ret)=$info=~/^\s*$field:\s*(\S+)/m;
+ return $ret;
+}
+
+sub cvs_runcvs(@) {
+ my @cmd = @_;
+ unshift @cmd, 'cvs', '-Q';
+
+ local $CWD = $config{srcdir};
+
+ open(my $savedout, ">&STDOUT");
+ open(STDOUT, ">", "/dev/null");
+ my $ret = system(@cmd);
+ open(STDOUT, ">&", $savedout);
+
+ return ($ret == 0) ? 1 : 0;
+}
+
+sub cvs_is_controlling {
+ my $dir=shift;
+ $dir=$config{srcdir} unless defined($dir);
+ return (-d "$dir/CVS") ? 1 : 0;
+}
+
+sub rcs_update () {
+ return unless cvs_is_controlling;
+ cvs_runcvs('update', '-dP');
+}
+
+sub rcs_prepedit ($) {
+ # Prepares to edit a file under revision control. Returns a token
+ # that must be passed into rcs_commit when the file is ready
+ # for committing.
+ # The file is relative to the srcdir.
+ my $file=shift;
+
+ return unless cvs_is_controlling;
+
+ # For cvs, return the revision of the file when
+ # editing begins.
+ my $rev=cvs_info("Repository revision", "$file");
+ return defined $rev ? $rev : "";
+}
+
+sub commitmessage (@) {
+ my %params=@_;
+
+ if (defined $params{session}) {
+ if (defined $params{session}->param("name")) {
+ return "web commit by ".
+ $params{session}->param("name").
+ (length $params{message} ? ": $params{message}" : "");
+ }
+ elsif (defined $params{session}->remote_addr()) {
+ return "web commit from ".
+ $params{session}->remote_addr().
+ (length $params{message} ? ": $params{message}" : "");
+ }
+ }
+ return $params{message};
+}
+
+sub rcs_commit (@) {
+ # Tries to commit the page; returns undef on _success_ and
+ # a version of the page with the rcs's conflict markers on failure.
+ # The file is relative to the srcdir.
+ my %params=@_;
+
+ return unless cvs_is_controlling;
+
+ # Check to see if the page has been changed by someone
+ # else since rcs_prepedit was called.
+ my ($oldrev)=$params{token}=~/^([0-9]+)$/; # untaint
+ my $rev=cvs_info("Repository revision", "$config{srcdir}/$params{file}");
+ if (defined $rev && defined $oldrev && $rev != $oldrev) {
+ # Merge their changes into the file that we've
+ # changed.
+ cvs_runcvs('update', $params{file}) ||
+ warn("cvs merge from $oldrev to $rev failed\n");
+ }
+
+ if (! cvs_runcvs('commit', '-m',
+ IkiWiki::possibly_foolish_untaint(commitmessage(%params)))) {
+ my $conflict=readfile("$config{srcdir}/$params{file}");
+ cvs_runcvs('update', '-C', $params{file}) ||
+ warn("cvs revert failed\n");
+ return $conflict;
+ }
+
+ return undef # success
+}
+
+sub rcs_commit_staged (@) {
+ # Commits all staged changes. Changes can be staged using rcs_add,
+ # rcs_remove, and rcs_rename.
+ my %params=@_;
+
+ if (! cvs_runcvs('commit', '-m',
+ IkiWiki::possibly_foolish_untaint(commitmessage(%params)))) {
+ warn "cvs staged commit failed\n";
+ return 1; # failure
+ }
+ return undef # success
+}
+
+sub rcs_add ($) {
+ # filename is relative to the root of the srcdir
+ my $file=shift;
+ my $parent=IkiWiki::dirname($file);
+ my @files_to_add = ($file);
+
+ eval q{use File::MimeInfo};
+ error($@) if $@;
+
+ until ((length($parent) == 0) || cvs_is_controlling("$config{srcdir}/$parent")){
+ push @files_to_add, $parent;
+ $parent = IkiWiki::dirname($parent);
+ }
+
+ while ($file = pop @files_to_add) {
+ if (@files_to_add == 0) {
+ # file
+ my $filemime = File::MimeInfo::default($file);
+ if (defined($filemime) && $filemime eq 'text/plain') {
+ cvs_runcvs('add', $file) ||
+ warn("cvs add $file failed\n");
+ }
+ else {
+ cvs_runcvs('add', '-kb', $file) ||
+ warn("cvs add binary $file failed\n");
+ }
+ }
+ else {
+ # directory
+ cvs_runcvs('add', $file) ||
+ warn("cvs add $file failed\n");
+ }
+ }
+}
+
+sub rcs_remove ($) {
+ # filename is relative to the root of the srcdir
+ my $file=shift;
+
+ return unless cvs_is_controlling;
+
+ cvs_runcvs('rm', '-f', $file) ||
+ warn("cvs rm $file failed\n");
+}
+
+sub rcs_rename ($$) {
+ # filenames relative to the root of the srcdir
+ my ($src, $dest)=@_;
+
+ return unless cvs_is_controlling;
+
+ local $CWD = $config{srcdir};
+
+ if (system("mv", "$src", "$dest") != 0) {
+ warn("filesystem rename failed\n");
+ }
+
+ rcs_add($dest);
+ rcs_remove($src);
+}
+
+sub rcs_recentchanges ($) {
+ my $num = shift;
+ my @ret;
+
+ return unless cvs_is_controlling;
+
+ eval q{use Date::Parse};
+ error($@) if $@;
+
+ local $CWD = $config{srcdir};
+
+ # There's no cvsps option to get the last N changesets.
+ # Write full output to a temp file and read backwards.
+
+ eval q{use File::Temp qw/tempfile/};
+ error($@) if $@;
+ eval q{use File::ReadBackwards};
+ error($@) if $@;
+
+ my ($tmphandle, $tmpfile) = tempfile();
+ system("env TZ=UTC cvsps -q --cvs-direct -z 30 -x >$tmpfile");
+ if ($? == -1) {
+ error "couldn't run cvsps: $!\n";
+ }
+ elsif (($? >> 8) != 0) {
+ error "cvsps exited " . ($? >> 8) . ": $!\n";
+ }
+
+ tie(*SPSVC, 'File::ReadBackwards', $tmpfile)
+ || error "couldn't open $tmpfile for read: $!\n";
+
+ while (my $line = <SPSVC>) {
+ $line =~ /^$/ || error "expected blank line, got $line";
+
+ my ($rev, $user, $committype, $when);
+ my (@message, @pages);
+
+ # We're reading backwards.
+ # Forwards, an entry looks like so:
+ # ---------------------
+ # PatchSet $rev
+ # Date: $when
+ # Author: $user (or user CGI runs as, for web commits)
+ # Branch: branch
+ # Tag: tag
+ # Log:
+ # @message_lines
+ # Members:
+ # @pages (and revisions)
+ #
+
+ while ($line = <SPSVC>) {
+ last if ($line =~ /^Members:/);
+ for ($line) {
+ s/^\s+//;
+ s/\s+$//;
+ }
+ my ($page, $revs) = split(/:/, $line);
+ my ($oldrev, $newrev) = split(/->/, $revs);
+ $oldrev =~ s/INITIAL/0/;
+ $newrev =~ s/\(DEAD\)//;
+ my $diffurl = defined $config{diffurl} ? $config{diffurl} : "";
+ $diffurl=~s/\[\[file\]\]/$page/g;
+ $diffurl=~s/\[\[r1\]\]/$oldrev/g;
+ $diffurl=~s/\[\[r2\]\]/$newrev/g;
+ unshift @pages, {
+ page => pagename($page),
+ diffurl => $diffurl,
+ } if length $page;
+ }
+
+ while ($line = <SPSVC>) {
+ last if ($line =~ /^Log:$/);
+ chomp $line;
+ unshift @message, { line => $line };
+ }
+ $committype = "web";
+ if (defined $message[0] &&
+ $message[0]->{line}=~/$config{web_commit_regexp}/) {
+ $user=defined $2 ? "$2" : "$3";
+ $message[0]->{line}=$4;
+ }
+ else {
+ $committype="cvs";
+ }
+
+ $line = <SPSVC>; # Tag
+ $line = <SPSVC>; # Branch
+
+ $line = <SPSVC>;
+ if ($line =~ /^Author: (.*)$/) {
+ $user = $1 unless defined $user && length $user;
+ }
+ else {
+ error "expected Author, got $line";
+ }
+
+ $line = <SPSVC>;
+ if ($line =~ /^Date: (.*)$/) {
+ $when = str2time($1, 'UTC');
+ }
+ else {
+ error "expected Date, got $line";
+ }
+
+ $line = <SPSVC>;
+ if ($line =~ /^PatchSet (.*)$/) {
+ $rev = $1;
+ }
+ else {
+ error "expected PatchSet, got $line";
+ }
+
+ $line = <SPSVC>; # ---------------------
+
+ push @ret, {
+ rev => $rev,
+ user => $user,
+ committype => $committype,
+ when => $when,
+ message => [@message],
+ pages => [@pages],
+ } if @pages;
+ last if @ret >= $num;
+ }
+
+ unlink($tmpfile) || error "couldn't unlink $tmpfile: $!\n";
+
+ return @ret;
+}
+
+sub rcs_diff ($) {
+ my $rev=IkiWiki::possibly_foolish_untaint(int(shift));
+
+ local $CWD = $config{srcdir};
+
+ # diff output is unavoidably preceded by the cvsps PatchSet entry
+ my @cvsps = `env TZ=UTC cvsps -q --cvs-direct -z 30 -g -s $rev`;
+ my $blank_lines_seen = 0;
+
+ while (my $line = shift @cvsps) {
+ $blank_lines_seen++ if ($line =~ /^$/);
+ last if $blank_lines_seen == 2;
+ }
+
+ if (wantarray) {
+ return @cvsps;
+ }
+ else {
+ return join("", @cvsps);
+ }
+}
+
+sub rcs_getctime ($) {
+ my $file=shift;
+
+ local $CWD = $config{srcdir};
+
+ my $cvs_log_infoline=qr/^date: (.+);\s+author/;
+
+ open CVSLOG, "cvs -Q log -r1.1 '$file' |"
+ || error "couldn't get cvs log output: $!\n";
+
+ my $date;
+ while (<CVSLOG>) {
+ if (/$cvs_log_infoline/) {
+ $date=$1;
+ }
+ }
+ close CVSLOG || warn "cvs log $file exited $?";
+
+ if (! defined $date) {
+ warn "failed to parse cvs log for $file\n";
+ return 0;
+ }
+
+ eval q{use Date::Parse};
+ error($@) if $@;
+ $date=str2time($date, 'UTC');
+ debug("found ctime ".localtime($date)." for $file");
+ return $date;
+}
+
+sub rcs_getmtime ($) {
+ error "rcs_getmtime is not implemented for cvs\n"; # TODO
+}
+
+1
diff --git a/IkiWiki/Plugin/darcs.pm b/IkiWiki/Plugin/darcs.pm
index 9b62e70e4..0f63b8807 100644
--- a/IkiWiki/Plugin/darcs.pm
+++ b/IkiWiki/Plugin/darcs.pm
@@ -18,6 +18,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
}
sub silentsystem (@) {
@@ -51,7 +52,7 @@ sub darcs_info ($$$) {
return $_;
}
-sub file_in_vc($$) {
+sub file_in_vc ($$) {
my $repodir = shift;
my $file = shift;
@@ -62,23 +63,23 @@ sub file_in_vc($$) {
}
my $found=0;
while (<DARCS_MANIFEST>) {
- $found = 1, last if /^(\.\/)?$file$/;
+ $found = 1 if /^(\.\/)?$file$/;
}
close(DARCS_MANIFEST) or error("'darcs query manifest' exited " . $?);
return $found;
}
-sub darcs_rev($) {
+sub darcs_rev ($) {
my $file = shift; # Relative to the repodir.
my $repodir = $config{srcdir};
- return "" if (! file_in_vc($repodir, $file));
+ return "" unless file_in_vc($repodir, $file);
my $hash = darcs_info('hash', $repodir, $file);
return defined $hash ? $hash : "";
}
-sub checkconfig() {
+sub checkconfig () {
if (defined $config{darcs_wrapper} && length $config{darcs_wrapper}) {
push @{$config{wrappers}}, {
wrapper => $config{darcs_wrapper},
@@ -87,11 +88,12 @@ sub checkconfig() {
}
}
-sub getsetup() {
+sub getsetup () {
return
plugin => {
safe => 0, # rcs plugin
rebuild => undef,
+ section => "rcs",
},
darcs_wrapper => {
type => "string",
@@ -138,14 +140,31 @@ sub rcs_prepedit ($) {
return $rev;
}
-sub rcs_commit ($$$;$$) {
+sub commitauthor (@) {
+ my %params=@_;
+
+ my $author="anon\@web";
+ if (defined $params{session}) {
+ if (defined $params{session}->param("name")) {
+ return $params{session}->param("name").'@web';
+ }
+ elsif (defined $params{session}->remote_addr()) {
+ return $params{session}->remote_addr().'@web';
+ }
+ }
+ return 'anon@web';
+}
+
+sub rcs_commit (@) {
# Commit the page. Returns 'undef' on success and a version of the page
# with conflict markers on failure.
+ my %params=@_;
- my ($file, $message, $rcstoken, $user, $ipaddr) = @_;
+ my ($file, $message, $token) =
+ ($params{file}, $params{message}, $params{token});
# Compute if the "revision" of $file changed.
- my $changed = darcs_rev($file) ne $rcstoken;
+ my $changed = darcs_rev($file) ne $token;
# Yes, the following is a bit convoluted.
if ($changed) {
@@ -153,7 +172,7 @@ sub rcs_commit ($$$;$$) {
rename("$config{srcdir}/$file", "$config{srcdir}/$file.save") or
error("failed to rename $file to $file.save: $!");
- # Roll the repository back to $rcstoken.
+ # Roll the repository back to $token.
# TODO. Can we be sure that no changes are lost? I think that
# we can, if we make sure that the 'darcs push' below will always
@@ -164,37 +183,28 @@ sub rcs_commit ($$$;$$) {
# TODO: 'yes | ...' needed? Doesn't seem so.
silentsystem('darcs', "revert", "--repodir", $config{srcdir}, "--all") == 0 ||
error("'darcs revert' failed");
- # Remove all patches starting at $rcstoken.
+ # Remove all patches starting at $token.
my $child = open(DARCS_OBLITERATE, "|-");
if (! $child) {
open(STDOUT, ">/dev/null");
exec('darcs', "obliterate", "--repodir", $config{srcdir},
- "--match", "hash " . $rcstoken) and
+ "--match", "hash " . $token) and
error("'darcs obliterate' failed");
}
1 while print DARCS_OBLITERATE "y";
close(DARCS_OBLITERATE);
- # Restore the $rcstoken one.
+ # Restore the $token one.
silentsystem('darcs', "pull", "--quiet", "--repodir", $config{srcdir},
- "--match", "hash " . $rcstoken, "--all") == 0 ||
+ "--match", "hash " . $token, "--all") == 0 ||
error("'darcs pull' failed");
- # We're back at $rcstoken. Re-install the modified file.
+ # We're back at $token. Re-install the modified file.
rename("$config{srcdir}/$file.save", "$config{srcdir}/$file") or
error("failed to rename $file.save to $file: $!");
}
# Record the changes.
- my $author;
- if (defined $user) {
- $author = "$user\@web";
- }
- elsif (defined $ipaddr) {
- $author = "$ipaddr\@web";
- }
- else {
- $author = "anon\@web";
- }
+ my $author=commitauthor(%params);
if (!defined $message || !length($message)) {
$message = "empty message";
}
@@ -209,13 +219,13 @@ sub rcs_commit ($$$;$$) {
# If this updating yields any conflicts, we'll record them now to resolve
# them. If nothing is recorded, there are no conflicts.
- $rcstoken = darcs_rev($file);
+ $token = darcs_rev($file);
# TODO: Use only the first line here, i.e. only the patch name?
writefile("$file.log", $config{srcdir}, 'resolve conflicts: ' . $message);
silentsystem('darcs', 'record', '--repodir', $config{srcdir}, '--all',
'-m', 'resolve conflicts: ' . $message, '--author', $author, $file) == 0 ||
error("'darcs record' failed");
- my $conflicts = darcs_rev($file) ne $rcstoken;
+ my $conflicts = darcs_rev($file) ne $token;
unlink("$config{srcdir}/$file.log") or
error("failed to remove '$file.log'");
@@ -237,25 +247,18 @@ sub rcs_commit ($$$;$$) {
}
}
-sub rcs_commit_staged($$$) {
- my ($message, $user, $ipaddr) = @_;
+sub rcs_commit_staged (@) {
+ my %params=@_;
- my $author;
- if (defined $user) {
- $author = "$user\@web";
- }
- elsif (defined $ipaddr) {
- $author = "$ipaddr\@web";
- }
- else {
- $author = "anon\@web";
- }
- if (!defined $message || !length($message)) {
- $message = "empty message";
+ my $author=commitauthor(%params);
+ if (!defined $params{message} || !length($params{message})) {
+ $params{message} = "empty message";
}
- silentsystem('darcs', "record", "--repodir", $config{srcdir}, "-a", "-A", $author,
- "-m", $message) == 0 || error("'darcs record' failed");
+ silentsystem('darcs', "record", "--repodir", $config{srcdir},
+ "-a", "-A", $author,
+ "-m", $params{message},
+ ) == 0 || error("'darcs record' failed");
# Push the changes to the main repository.
silentsystem('darcs', 'push', '--quiet', '--repodir', $config{srcdir}, '--all') == 0 ||
@@ -318,9 +321,9 @@ sub rcs_recentchanges ($) {
my $hash=$patch->{hash};
my $when=str2time($date);
my (@pages, @files, @pg);
- push @pages, $_ for (@{$patch->{summary}->[0]->{modify_file}});
- push @pages, $_ for (@{$patch->{summary}->[0]->{add_file}});
- push @pages, $_ for (@{$patch->{summary}->[0]->{remove_file}});
+ push @pages, $_ foreach (@{$patch->{summary}->[0]->{modify_file}});
+ push @pages, $_ foreach (@{$patch->{summary}->[0]->{add_file}});
+ push @pages, $_ foreach (@{$patch->{summary}->[0]->{remove_file}});
foreach my $f (@pages) {
$f = $f->{content} if ref $f;
$f =~ s,^\s+,,; $f =~ s,\s+$,,; # cut whitespace
@@ -393,14 +396,11 @@ sub rcs_getctime ($) {
eval q{use XML::Simple};
local $/=undef;
- my $filer=substr($file, length($config{srcdir}));
- $filer =~ s:^[/]+::;
-
my $child = open(LOG, "-|");
if (! $child) {
exec("darcs", "changes", "--xml", "--reverse",
- "--repodir", $config{srcdir}, $filer)
- || error("'darcs changes $filer' failed to run");
+ "--repodir", $config{srcdir}, $file)
+ || error("'darcs changes $file' failed to run");
}
my $data;
@@ -415,7 +415,7 @@ sub rcs_getctime ($) {
my $datestr = $log->{patch}[0]->{local_date};
if (! defined $datestr) {
- warn "failed to get ctime for $filer";
+ warn "failed to get ctime for $file";
return 0;
}
@@ -426,4 +426,8 @@ sub rcs_getctime ($) {
return $date;
}
+sub rcs_getmtime ($) {
+ error "rcs_getmtime is not implemented for darcs\n"; # TODO
+}
+
1
diff --git a/IkiWiki/Plugin/date.pm b/IkiWiki/Plugin/date.pm
new file mode 100644
index 000000000..ea5c9a9c5
--- /dev/null
+++ b/IkiWiki/Plugin/date.pm
@@ -0,0 +1,34 @@
+#!/usr/bin/perl
+package IkiWiki::Plugin::date;
+
+use warnings;
+use strict;
+use IkiWiki 3.00;
+
+sub import {
+ hook(type => "getsetup", id => "date", call => \&getsetup);
+ hook(type => "preprocess", id => "date", call => \&preprocess);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ section => "widget",
+ },
+}
+
+sub preprocess (@) {
+ my $str=shift;
+
+ eval q{use Date::Parse};
+ error $@ if $@;
+ my $time = str2time($str);
+ if (! defined $time) {
+ error("unable to parse $str");
+ }
+ return displaytime($time);
+}
+
+1
diff --git a/IkiWiki/Plugin/editdiff.pm b/IkiWiki/Plugin/editdiff.pm
index 7df6a9ffb..015ce9c14 100644
--- a/IkiWiki/Plugin/editdiff.pm
+++ b/IkiWiki/Plugin/editdiff.pm
@@ -19,6 +19,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "web",
},
}
@@ -70,7 +71,7 @@ sub formbuilder_setup {
$content=~s/\r/\n/g;
my $diff = diff(srcfile($pagesources{$page}), $content);
- $form->tmpl_param("page_preview", $diff);
+ $form->tmpl_param("page_diff", $diff);
}
}
diff --git a/IkiWiki/Plugin/editpage.pm b/IkiWiki/Plugin/editpage.pm
index 467cd9ed5..1a04a72b5 100644
--- a/IkiWiki/Plugin/editpage.pm
+++ b/IkiWiki/Plugin/editpage.pm
@@ -17,6 +17,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1,
+ section => "core",
},
}
@@ -63,7 +64,7 @@ sub cgi_editpage ($$) {
decode_cgi_utf8($q);
- my @fields=qw(do rcsinfo subpage from page type editcontent comments);
+ my @fields=qw(do rcsinfo subpage from page type editcontent editmessage);
my @buttons=("Save Page", "Preview", "Cancel");
eval q{use CGI::FormBuilder};
error($@) if $@;
@@ -77,7 +78,7 @@ sub cgi_editpage ($$) {
action => $config{cgiurl},
header => 0,
table => 0,
- template => scalar template_params("editpage.tmpl"),
+ template => { template("editpage.tmpl") },
);
decode_form_utf8($form);
@@ -91,9 +92,9 @@ sub cgi_editpage ($$) {
# wiki_file_regexp.
my ($page)=$form->field('page')=~/$config{wiki_file_regexp}/;
$page=possibly_foolish_untaint($page);
- my $absolute=($page =~ s#^/+##);
+ my $absolute=($page =~ s#^/+##); # absolute name used to force location
if (! defined $page || ! length $page ||
- file_pruned($page, $config{srcdir})) {
+ file_pruned($page)) {
error(gettext("bad page name"));
}
@@ -143,16 +144,16 @@ sub cgi_editpage ($$) {
$form->field(name => "subpage", type => 'hidden');
$form->field(name => "page", value => $page, force => 1);
$form->field(name => "type", value => $type, force => 1);
- $form->field(name => "comments", type => "text", size => 80);
+ $form->field(name => "editmessage", type => "text", size => 80);
$form->field(name => "editcontent", type => "textarea", rows => 20,
cols => 80);
$form->tmpl_param("can_commit", $config{rcs});
- $form->tmpl_param("indexlink", indexlink());
$form->tmpl_param("helponformattinglink",
htmllink($page, $page, "ikiwiki/formatting",
noimageinline => 1,
linktext => "FormattingHelp"));
+ my $previewing=0;
if ($form->submitted eq "Cancel") {
if ($form->field("do") eq "create" && defined $from) {
redirect($q, urlto($from, undef, 1));
@@ -166,11 +167,11 @@ sub cgi_editpage ($$) {
exit;
}
elsif ($form->submitted eq "Preview") {
+ $previewing=1;
+
my $new=not exists $pagesources{$page};
- if ($new) {
- # temporarily record its type
- $pagesources{$page}=$page.".".$type;
- }
+ # temporarily record its type
+ $pagesources{$page}=$page.".".$type if $new;
my %wasrendered=map { $_ => 1 } @{$renderedfiles{$page}};
my $content=$form->field('editcontent');
@@ -195,18 +196,17 @@ sub cgi_editpage ($$) {
});
$form->tmpl_param("page_preview", $preview);
- if ($new) {
- delete $pagesources{$page};
- }
-
# Previewing may have created files on disk.
# Keep a list of these to be deleted later.
my %previews = map { $_ => 1 } @{$wikistate{editpage}{previews}};
foreach my $f (@{$renderedfiles{$page}}) {
$previews{$f}=1 unless $wasrendered{$f};
}
+
+ # Throw out any other state changes made during previewing,
+ # and save the previews list.
+ loadindex();
@{$wikistate{editpage}{previews}} = keys %previews;
- $renderedfiles{$page}=[keys %wasrendered];
saveindex();
}
elsif ($form->submitted eq "Save Page") {
@@ -219,8 +219,7 @@ sub cgi_editpage ($$) {
my $best_loc;
if (! defined $from || ! length $from ||
$from ne $form->field('from') ||
- file_pruned($from, $config{srcdir}) ||
- $from=~/^\// ||
+ file_pruned($from) ||
$absolute ||
$form->submitted) {
@page_locs=$best_loc=$page;
@@ -229,8 +228,9 @@ sub cgi_editpage ($$) {
my $dir=$from."/";
$dir=~s![^/]+/+$!!;
- if ((defined $form->field('subpage') && length $form->field('subpage')) ||
- $page eq lc(gettext('Discussion'))) {
+ if ((defined $form->field('subpage') &&
+ length $form->field('subpage')) ||
+ $page eq lc($config{discussionpage})) {
$best_loc="$from/$page";
}
else {
@@ -244,8 +244,9 @@ sub cgi_editpage ($$) {
push @page_locs, $dir.$page;
}
- push @page_locs, "$config{userdir}/$page"
- if length $config{userdir};
+ my $userpage=IkiWiki::userpage($page);
+ push @page_locs, $userpage
+ if ! grep { $_ eq $userpage } @page_locs;
}
@page_locs = grep {
@@ -254,7 +255,7 @@ sub cgi_editpage ($$) {
if (! @page_locs) {
# hmm, someone else made the page in the
# meantime?
- if ($form->submitted eq "Preview") {
+ if ($previewing) {
# let them go ahead with the edit
# and resolve the conflict at save
# time
@@ -270,8 +271,10 @@ sub cgi_editpage ($$) {
check_canedit($_, $q, $session, 1)
} @page_locs;
if (! @editable_locs) {
- # let it throw an error this time
- map { check_canedit($_, $q, $session) } @page_locs;
+ # now let it throw an error, or prompt for
+ # login
+ map { check_canedit($_, $q, $session) }
+ ($best_loc, @page_locs);
}
my @page_types;
@@ -309,7 +312,8 @@ sub cgi_editpage ($$) {
$form->title(sprintf(gettext("editing %s"), pagetitle($page)));
}
- showform($form, \@buttons, $session, $q, forcebaseurl => $baseurl);
+ showform($form, \@buttons, $session, $q,
+ forcebaseurl => $baseurl, page => $page);
}
else {
# save page
@@ -326,7 +330,8 @@ sub cgi_editpage ($$) {
$form->field(name => "page", type => 'hidden');
$form->field(name => "type", type => 'hidden');
$form->title(sprintf(gettext("editing %s"), $page));
- showform($form, \@buttons, $session, $q, forcebaseurl => $baseurl);
+ showform($form, \@buttons, $session, $q,
+ forcebaseurl => $baseurl, page => $page);
exit;
}
elsif ($form->field("do") eq "create" && $exists) {
@@ -340,14 +345,15 @@ sub cgi_editpage ($$) {
value => readfile("$config{srcdir}/$file").
"\n\n\n".$form->field("editcontent"),
force => 1);
- showform($form, \@buttons, $session, $q, forcebaseurl => $baseurl);
+ showform($form, \@buttons, $session, $q,
+ forcebaseurl => $baseurl, page => $page);
exit;
}
my $message="";
- if (defined $form->field('comments') &&
- length $form->field('comments')) {
- $message=$form->field('comments');
+ if (defined $form->field('editmessage') &&
+ length $form->field('editmessage')) {
+ $message=$form->field('editmessage');
}
my $content=$form->field('editcontent');
@@ -381,7 +387,7 @@ sub cgi_editpage ($$) {
$form->field(name => "type", type => 'hidden');
$form->title(sprintf(gettext("editing %s"), $page));
showform($form, \@buttons, $session, $q,
- forcebaseurl => $baseurl);
+ forcebaseurl => $baseurl, page => $page);
exit;
}
@@ -395,9 +401,12 @@ sub cgi_editpage ($$) {
# signaling to it that it should not try to
# do anything.
disable_commit_hook();
- $conflict=rcs_commit($file, $message,
- $form->field("rcsinfo"),
- $session->param("name"), $ENV{REMOTE_ADDR});
+ $conflict=rcs_commit(
+ file => $file,
+ message => $message,
+ token => $form->field("rcsinfo"),
+ session => $session,
+ );
enable_commit_hook();
rcs_update();
}
@@ -420,7 +429,7 @@ sub cgi_editpage ($$) {
$form->field(name => "type", type => 'hidden');
$form->title(sprintf(gettext("editing %s"), $page));
showform($form, \@buttons, $session, $q,
- forcebaseurl => $baseurl);
+ forcebaseurl => $baseurl, page => $page);
}
else {
# The trailing question mark tries to avoid broken
diff --git a/IkiWiki/Plugin/edittemplate.pm b/IkiWiki/Plugin/edittemplate.pm
index 0bafc95d0..226f83bb4 100644
--- a/IkiWiki/Plugin/edittemplate.pm
+++ b/IkiWiki/Plugin/edittemplate.pm
@@ -23,6 +23,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "web",
},
}
@@ -55,10 +56,17 @@ sub preprocess (@) {
}
my $link=linkpage($params{template});
- $pagestate{$params{page}}{edittemplate}{$params{match}}=$link;
+ add_depends($params{page}, $link, deptype("presence"));
+ my $bestlink=bestlink($params{page}, $link);
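+ # If the named template can't be found relative to the page,
+ # fall back to looking for it under templates/.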
+ if (! length $bestlink) {
+ add_depends($params{page}, "templates/$link", deptype("presence"));
+ $link="/templates/".$link;
+ $bestlink=bestlink($params{page}, $link);
+ }
+ $pagestate{$params{page}}{edittemplate}{$params{match}}=$bestlink;
- return "" if ($params{silent} && IkiWiki::yesno($params{silent}));
- add_depends($params{page}, $link);
+ return "" if ($params{silent} && IkiWiki::yesno($params{silent})) &&
+ length $bestlink;
return sprintf(gettext("edittemplate %s registered for %s"),
htmllink($params{page}, $params{destpage}, $link),
$params{match});
@@ -82,10 +90,13 @@ sub formbuilder (@) {
foreach my $field ($form->field) {
if ($field eq 'page') {
@page_locs=$field->def_value;
- push @page_locs, $field->options;
+
+ # FormBuilder is on the bad crack. See #551499
+ my @options=map { ref $_ ? @$_ : $_ } $field->options;
+
+ push @page_locs, @options;
}
}
-
foreach my $p (@page_locs) {
foreach my $registering_page (keys %pagestate) {
if (exists $pagestate{$registering_page}{edittemplate}) {
@@ -109,28 +120,18 @@ sub filltemplate ($$) {
my $template_page=shift;
my $page=shift;
- my $template_file=$pagesources{$template_page};
- if (! defined $template_file) {
- return;
- }
-
my $template;
eval {
- $template=HTML::Template->new(
- filter => sub {
- my $text_ref = shift;
- $$text_ref=&Encode::decode_utf8($$text_ref);
- chomp $$text_ref;
- },
- filename => srcfile($template_file),
- die_on_bad_params => 0,
- no_includes => 1,
- );
+ # force page name absolute so it doesn't look in templates/
+ $template=template("/".$template_page);
};
if ($@) {
# Indicate that the earlier preprocessor directive set
# up a template that doesn't work.
- return "[[!pagetemplate ".gettext("failed to process")." $@]]";
+ return "[[!pagetemplate ".gettext("failed to process template:")." $@]]";
+ }
+ if (! defined $template) {
+ return;
}
$template->param(name => $page);
diff --git a/IkiWiki/Plugin/external.pm b/IkiWiki/Plugin/external.pm
index 0d292dfc2..ec91c79db 100644
--- a/IkiWiki/Plugin/external.pm
+++ b/IkiWiki/Plugin/external.pm
@@ -8,7 +8,6 @@ use warnings;
use strict;
use IkiWiki 3.00;
use RPC::XML;
-use RPC::XML::Parser;
use IPC::Open2;
use IO::Handle;
@@ -55,7 +54,19 @@ sub rpc_call ($$;@) {
$plugin->{accum}.=$_;
while ($plugin->{accum} =~ /^\s*(<\?xml\s.*?<\/(?:methodCall|methodResponse)>)\n(.*)/s) {
$plugin->{accum}=$2;
- my $r = RPC::XML::Parser->new->parse($1);
+ my $parser;
+ eval q{
+ use RPC::XML::ParserFactory;
+ $parser = RPC::XML::ParserFactory->new;
+ };
+ if ($@) {
+ # old interface
+ eval q{
+ use RPC::XML::Parser;
+ $parser = RPC::XML::Parser->new;
+ };
+ }
+ my $r=$parser->parse($1);
error("XML RPC parser failure: $r") unless ref $r;
if ($r->isa('RPC::XML::response')) {
my $value=$r->value;
@@ -72,9 +83,9 @@ sub rpc_call ($$;@) {
# XML-RPC v1 does not allow for
# nil/null/None/undef values to be
- # transmitted, so until
- # XML::RPC::Parser honours v2
- # (<nil/>), external plugins send
+ # transmitted. The <nil/> extension
+ # is the right fix, but for
+ # back-compat, let external plugins send
# a hash with one key "null" pointing
# to an empty string.
if (exists $hash{null} &&
diff --git a/IkiWiki/Plugin/filecheck.pm b/IkiWiki/Plugin/filecheck.pm
index 01d490961..a78058ffe 100644
--- a/IkiWiki/Plugin/filecheck.pm
+++ b/IkiWiki/Plugin/filecheck.pm
@@ -5,7 +5,7 @@ use warnings;
use strict;
use IkiWiki 3.00;
-my %units=( #{{{ # size in bytes
+my %units=( # size in bytes
B => 1,
byte => 1,
KB => 2 ** 10,
@@ -39,6 +39,19 @@ my %units=( #{{{ # size in bytes
# -- Joey
);
+sub import {
+ hook(type => "getsetup", id => "filecheck", call => \&getsetup);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ section => "misc",
+ },
+}
+
sub parsesize ($) {
my $size=shift;
@@ -75,9 +88,9 @@ sub match_maxsize ($$;@) {
}
my %params=@_;
- my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
+ my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page});
if (! defined $file) {
- return IkiWiki::ErrorReason->new("no file specified");
+ return IkiWiki::ErrorReason->new("file does not exist");
}
if (-s $file > $maxsize) {
@@ -96,9 +109,9 @@ sub match_minsize ($$;@) {
}
my %params=@_;
- my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
+ my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page});
if (! defined $file) {
- return IkiWiki::ErrorReason->new("no file specified");
+ return IkiWiki::ErrorReason->new("file does not exist");
}
if (-s $file < $minsize) {
@@ -114,20 +127,33 @@ sub match_mimetype ($$;@) {
my $wanted=shift;
my %params=@_;
- my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
+ my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page});
if (! defined $file) {
- return IkiWiki::ErrorReason->new("no file specified");
+ return IkiWiki::ErrorReason->new("file does not exist");
}
- # Use ::magic to get the mime type, the idea is to only trust
- # data obtained by examining the actual file contents.
+ # Get the mime type.
+ #
+ # First, try File::MimeInfo. This is fast, but doesn't recognise
+ # all files.
eval q{use File::MimeInfo::Magic};
- if ($@) {
- return IkiWiki::ErrorReason->new("failed to load File::MimeInfo::Magic ($@); cannot check MIME type");
+ my $mimeinfo_ok=! $@;
+ my $mimetype;
+ if ($mimeinfo_ok) {
+ $mimetype=File::MimeInfo::Magic::magic($file);
}
- my $mimetype=File::MimeInfo::Magic::magic($file);
+
+ # Fall back to using file, which has a more complete
+ # magic database.
if (! defined $mimetype) {
- $mimetype=File::MimeInfo::Magic::default($file);
+ open(my $file_h, "-|", "file", "-bi", $file);
+ $mimetype=<$file_h>;
+ close $file_h;
+ }
+ if (! defined $mimetype || $mimetype !~ s/;.*//) {
+ # Fall back to default value.
+ $mimetype=File::MimeInfo::Magic::default($file)
+ if $mimeinfo_ok;
if (! defined $mimetype) {
$mimetype="unknown";
}
@@ -147,9 +173,9 @@ sub match_virusfree ($$;@) {
my $wanted=shift;
my %params=@_;
- my $file=exists $params{file} ? $params{file} : $IkiWiki::pagesources{$page};
+ my $file=exists $params{file} ? $params{file} : IkiWiki::srcfile($IkiWiki::pagesources{$page});
if (! defined $file) {
- return IkiWiki::ErrorReason->new("no file specified");
+ return IkiWiki::ErrorReason->new("file does not exist");
}
if (! exists $IkiWiki::config{virus_checker} ||
diff --git a/IkiWiki/Plugin/flattr.pm b/IkiWiki/Plugin/flattr.pm
new file mode 100644
index 000000000..3aee1eb93
--- /dev/null
+++ b/IkiWiki/Plugin/flattr.pm
@@ -0,0 +1,97 @@
+#!/usr/bin/perl
+package IkiWiki::Plugin::flattr;
+
+use warnings;
+use strict;
+use IkiWiki 3.00;
+
+sub import {
+ hook(type => "getsetup", id => "flattr", call => \&getsetup);
+ hook(type => "preprocess", id => "flattr", call => \&preprocess);
+ hook(type => "format", id => "flattr", call => \&format);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ },
+ flattr_userid => {
+ type => "string",
+ example => 'joeyh',
+ description => "userid or user name to use by default for Flattr buttons",
+ advanced => 0,
+ safe => 1,
+ rebuild => undef,
+ },
+}
+
+my %flattr_pages;
+
+sub preprocess (@) {
+ my %params=@_;
+
+ $flattr_pages{$params{destpage}}=1;
+
+ my $url=$params{url};
+ if (! defined $url) {
+ $url=urlto($params{page}, "", 1);
+ }
+
+ my @fields;
+ foreach my $field (qw{language uid button hidden category tags}) {
+ if (exists $params{$field}) {
+ push @fields, "$field:$params{$field}";
+ }
+ }
+
+ return '<a class="FlattrButton" href="'.$url.'"'.
+ (exists $params{title} ? ' title="'.$params{title}.'"' : '').
+ ' rev="flattr;'.join(';', @fields).';"'.
+ '>'.
+ (exists $params{description} ? $params{description} : '').
+ '</a>';
+}
+
+sub format (@) {
+ my %params=@_;
+
+ # Add flattr's javascript to pages with flattr buttons.
+ if ($flattr_pages{$params{page}}) {
+ if (! ($params{content}=~s!^(<body[^>]*>)!$1.flattrjs()!em)) {
+ # no <body> tag, probably in preview mode
+ $params{content}=flattrjs().$params{content};
+ }
+ }
+ return $params{content};
+}
+
+my $js_cached;
+sub flattrjs {
+ return $js_cached if defined $js_cached;
+
+ my $js_url='https://api.flattr.com/js/0.5.0/load.js?mode=auto';
+ if (defined $config{flattr_userid}) {
+ my $userid=$config{flattr_userid};
+ $userid=~s/[^-A-Za-z0-9_]//g; # sanitize for inclusion in javascript
+ $js_url.="&uid=$userid";
+ }
+
+ # This is Flattr's standard javascript snippet to include their
+ # external javascript file, asynchronously.
+ return $js_cached=<<"EOF";
+<script type="text/javascript">
+<!--//--><![CDATA[//><!--
+(function() {
+ var s = document.createElement('script'), t = document.getElementsByTagName('script')[0];
+ s.type = 'text/javascript';
+ s.async = true;
+ s.src = '$js_url';
+ t.parentNode.insertBefore(s, t);
+})();//--><!]]>
+</script>
+EOF
+}
+
+1
diff --git a/IkiWiki/Plugin/format.pm b/IkiWiki/Plugin/format.pm
index 1513cbed7..d54e71131 100644
--- a/IkiWiki/Plugin/format.pm
+++ b/IkiWiki/Plugin/format.pm
@@ -7,6 +7,16 @@ use IkiWiki 3.00;
sub import {
hook(type => "preprocess", id => "format", call => \&preprocess);
+ hook(type => "getsetup", id => "format", call => \&getsetup);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ section => "widget",
+ },
}
sub preprocess (@) {
diff --git a/IkiWiki/Plugin/fortune.pm b/IkiWiki/Plugin/fortune.pm
index 17e57dea1..f481c7eac 100644
--- a/IkiWiki/Plugin/fortune.pm
+++ b/IkiWiki/Plugin/fortune.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
diff --git a/IkiWiki/Plugin/getsource.pm b/IkiWiki/Plugin/getsource.pm
new file mode 100644
index 000000000..b362de726
--- /dev/null
+++ b/IkiWiki/Plugin/getsource.pm
@@ -0,0 +1,94 @@
+#!/usr/bin/perl
+package IkiWiki::Plugin::getsource;
+
+use warnings;
+use strict;
+use IkiWiki;
+use open qw{:utf8 :std};
+
+sub import {
+ hook(type => "getsetup", id => "getsource", call => \&getsetup);
+ hook(type => "pagetemplate", id => "getsource", call => \&pagetemplate);
+ hook(type => "cgi", id => "getsource", call => \&cgi_getsource);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => 1,
+ section => "web",
+ },
+ getsource_mimetype => {
+ type => "string",
+ example => "text/plain; charset=utf-8",
+ description => "Mime type for returned source.",
+ safe => 1,
+ rebuild => 0,
+ },
+}
+
+sub pagetemplate (@) {
+ my %params=@_;
+
+ my $page=$params{page};
+ my $template=$params{template};
+
+ if (length $config{cgiurl}) {
+ $template->param(getsourceurl => IkiWiki::cgiurl(do => "getsource", page => $page));
+ $template->param(have_actions => 1);
+ }
+}
+
+sub cgi_getsource ($) {
+ my $cgi=shift;
+
+ return unless defined $cgi->param('do') &&
+ $cgi->param("do") eq "getsource";
+
+ IkiWiki::decode_cgi_utf8($cgi);
+
+ my $page=$cgi->param('page');
+
+ if (! defined $page || $page !~ /$config{wiki_file_regexp}/) {
+ error("invalid page parameter");
+ }
+
+ # For %pagesources.
+ IkiWiki::loadindex();
+
+ if (! exists $pagesources{$page}) {
+ IkiWiki::cgi_custom_failure(
+ $cgi,
+ "404 Not Found",
+ IkiWiki::misctemplate(gettext("missing page"),
+ "<p>".
+ sprintf(gettext("The page %s does not exist."),
+ htmllink("", "", $page)).
+ "</p>"));
+ exit;
+ }
+
+ if (! defined pagetype($pagesources{$page})) {
+ IkiWiki::cgi_custom_failure(
+ $cgi->header(-status => "403 Forbidden"),
+ IkiWiki::misctemplate(gettext("not a page"),
+ "<p>".
+ sprintf(gettext("%s is an attachment, not a page."),
+ htmllink("", "", $page)).
+ "</p>"));
+ exit;
+ }
+
+ if (! $config{getsource_mimetype}) {
+ $config{getsource_mimetype} = "text/plain; charset=utf-8";
+ }
+
+ print "Content-Type: $config{getsource_mimetype}\r\n";
+ print ("\r\n");
+ print readfile(srcfile($pagesources{$page}));
+
+ exit 0;
+}
+
+1
diff --git a/IkiWiki/Plugin/git.pm b/IkiWiki/Plugin/git.pm
index 68b114a73..d342a7398 100644
--- a/IkiWiki/Plugin/git.pm
+++ b/IkiWiki/Plugin/git.pm
@@ -14,6 +14,7 @@ my $no_chdir=0;
sub import {
hook(type => "checkconfig", id => "git", call => \&checkconfig);
hook(type => "getsetup", id => "git", call => \&getsetup);
+ hook(type => "genwrapper", id => "git", call => \&genwrapper);
hook(type => "rcs", id => "rcs_update", call => \&rcs_update);
hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit);
hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit);
@@ -24,6 +25,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
hook(type => "rcs", id => "rcs_receive", call => \&rcs_receive);
}
@@ -39,16 +41,30 @@ sub checkconfig () {
push @{$config{wrappers}}, {
wrapper => $config{git_wrapper},
wrappermode => (defined $config{git_wrappermode} ? $config{git_wrappermode} : "06755"),
+ wrapper_background_command => $config{git_wrapper_background_command},
};
}
+
if (defined $config{git_test_receive_wrapper} &&
- length $config{git_test_receive_wrapper}) {
+ length $config{git_test_receive_wrapper} &&
+ defined $config{untrusted_committers} &&
+ @{$config{untrusted_committers}}) {
push @{$config{wrappers}}, {
test_receive => 1,
wrapper => $config{git_test_receive_wrapper},
wrappermode => (defined $config{git_wrappermode} ? $config{git_wrappermode} : "06755"),
};
}
+
+ # Avoid notes; the parser does not handle them and they only slow things down.
+ $ENV{GIT_NOTES_REF}="";
+
+ # Run receive test only if being called by the wrapper, and not
+ # when generating same.
+ if ($config{test_receive} && ! exists $config{wrapper}) {
+ require IkiWiki::Receive;
+ IkiWiki::Receive::test();
+ }
}
sub getsetup () {
@@ -56,6 +72,7 @@ sub getsetup () {
plugin => {
safe => 0, # rcs plugin
rebuild => undef,
+ section => "rcs",
},
git_wrapper => {
type => "string",
@@ -64,6 +81,13 @@ sub getsetup () {
safe => 0, # file
rebuild => 0,
},
+ git_wrapper_background_command => {
+ type => "string",
+ example => "git push github",
+ description => "shell command for git_wrapper to run, in the background",
+ safe => 0, # command
+ rebuild => 0,
+ },
git_wrappermode => {
type => "string",
example => '06755',
@@ -87,7 +111,7 @@ sub getsetup () {
},
historyurl => {
type => "string",
- example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=history;f=[[file]]",
+ example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=history;f=[[file]];hb=HEAD",
description => "gitweb url to show file history ([[file]] substituted)",
safe => 1,
rebuild => 1,
@@ -115,6 +139,16 @@ sub getsetup () {
},
}
+sub genwrapper {
+ if ($config{test_receive}) {
+ require IkiWiki::Receive;
+ return IkiWiki::Receive::genwrapper();
+ }
+ else {
+ return "";
+ }
+}
+
sub safe_git (&@) {
# Start a child process safely without resorting /bin/sh.
# Return command output or success state (in scalar context).
@@ -256,11 +290,35 @@ sub merge_past ($$$) {
return $conflict;
}
-sub parse_diff_tree ($@) {
+{
+my $prefix;
+sub decode_git_file ($) {
+ my $file=shift;
+
+ # git does not output utf-8 filenames, but instead
+ # double-quotes them with the utf-8 characters
+ # escaped as \nnn\nnn.
+ if ($file =~ m/^"(.*)"$/) {
+ ($file=$1) =~ s/\\([0-7]{1,3})/chr(oct($1))/eg;
+ }
+
+ # strip prefix if in a subdir
+ if (! defined $prefix) {
+ ($prefix) = run_or_die('git', 'rev-parse', '--show-prefix');
+ if (! defined $prefix) {
+ $prefix="";
+ }
+ }
+ $file =~ s/^\Q$prefix\E//;
+
+ return decode("utf8", $file);
+}
+}
+
+sub parse_diff_tree ($) {
# Parse the raw diff tree chunk and return the info hash.
# See git-diff-tree(1) for the syntax.
-
- my ($prefix, $dt_ref) = @_;
+ my $dt_ref = shift;
# End of stream?
return if !defined @{ $dt_ref } ||
@@ -294,8 +352,9 @@ sub parse_diff_tree ($@) {
$ci{ "${who}_epoch" } = $epoch;
$ci{ "${who}_tz" } = $tz;
- if ($name =~ m/^[^<]+\s+<([^@>]+)/) {
- $ci{"${who}_username"} = $1;
+ if ($name =~ m/^([^<]+)\s+<([^@>]+)/) {
+ $ci{"${who}_name"} = $1;
+ $ci{"${who}_username"} = $2;
}
elsif ($name =~ m/^([^<]+)\s+<>$/) {
$ci{"${who}_username"} = $1;
@@ -343,16 +402,9 @@ sub parse_diff_tree ($@) {
my $sha1_to = shift(@tmp);
my $status = shift(@tmp);
- # git does not output utf-8 filenames, but instead
- # double-quotes them with the utf-8 characters
- # escaped as \nnn\nnn.
- if ($file =~ m/^"(.*)"$/) {
- ($file=$1) =~ s/\\([0-7]{1,3})/chr(oct($1))/eg;
- }
- $file =~ s/^\Q$prefix\E//;
if (length $file) {
push @{ $ci{'details'} }, {
- 'file' => decode("utf8", $file),
+ 'file' => decode_git_file($file),
'sha1_from' => $sha1_from[0],
'sha1_to' => $sha1_to,
'mode_from' => $mode_from[0],
@@ -379,10 +431,9 @@ sub git_commit_info ($;$) {
my @raw_lines = run_or_die('git', 'log', @opts,
'--pretty=raw', '--raw', '--abbrev=40', '--always', '-c',
'-r', $sha1, '--', '.');
- my ($prefix) = run_or_die('git', 'rev-parse', '--show-prefix');
my @ci;
- while (my $parsed = parse_diff_tree(($prefix or ""), \@raw_lines)) {
+ while (my $parsed = parse_diff_tree(\@raw_lines)) {
push @ci, $parsed;
}
@@ -400,7 +451,10 @@ sub git_sha1 (;$) {
'--', $file);
if ($sha1) {
($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now
- } else { debug("Empty sha1sum for '$file'.") }
+ }
+ else {
+ debug("Empty sha1sum for '$file'.");
+ }
return defined $sha1 ? $sha1 : q{};
}
@@ -420,43 +474,62 @@ sub rcs_prepedit ($) {
return git_sha1($file);
}
-sub rcs_commit ($$$;$$) {
+sub rcs_commit (@) {
# Try to commit the page; returns undef on _success_ and
# a version of the page with the rcs's conflict markers on
# failure.
-
- my ($file, $message, $rcstoken, $user, $ipaddr) = @_;
+ my %params=@_;
# Check to see if the page has been changed by someone else since
# rcs_prepedit was called.
- my $cur = git_sha1($file);
- my ($prev) = $rcstoken =~ /^($sha1_pattern)$/; # untaint
+ my $cur = git_sha1($params{file});
+ my ($prev) = $params{token} =~ /^($sha1_pattern)$/; # untaint
if (defined $cur && defined $prev && $cur ne $prev) {
- my $conflict = merge_past($prev, $file, $dummy_commit_msg);
+ my $conflict = merge_past($prev, $params{file}, $dummy_commit_msg);
return $conflict if defined $conflict;
}
- rcs_add($file);
- return rcs_commit_staged($message, $user, $ipaddr);
+ rcs_add($params{file});
+ return rcs_commit_staged(
+ message => $params{message},
+ session => $params{session},
+ );
}
-sub rcs_commit_staged ($$$) {
+sub rcs_commit_staged (@) {
# Commits all staged changes. Changes can be staged using rcs_add,
# rcs_remove, and rcs_rename.
- my ($message, $user, $ipaddr)=@_;
-
- # Set the commit author and email to the web committer.
+ my %params=@_;
+
my %env=%ENV;
- if (defined $user || defined $ipaddr) {
- my $u=encode_utf8(defined $user ? $user : $ipaddr);
- $ENV{GIT_AUTHOR_NAME}=$u;
- $ENV{GIT_AUTHOR_EMAIL}="$u\@web";
+
+ if (defined $params{session}) {
+ # Set the commit author and email based on web session info.
+ my $u;
+ if (defined $params{session}->param("name")) {
+ $u=$params{session}->param("name");
+ }
+ elsif (defined $params{session}->remote_addr()) {
+ $u=$params{session}->remote_addr();
+ }
+ if (defined $u) {
+ $u=encode_utf8($u);
+ $ENV{GIT_AUTHOR_NAME}=$u;
+ }
+ if (defined $params{session}->param("nickname")) {
+ $u=encode_utf8($params{session}->param("nickname"));
+ $u=~s/\s+/_/g;
+ $u=~s/[^-_0-9[:alnum:]]+//g;
+ }
+ if (defined $u) {
+ $ENV{GIT_AUTHOR_EMAIL}="$u\@web";
+ }
}
- $message = IkiWiki::possibly_foolish_untaint($message);
+ $params{message} = IkiWiki::possibly_foolish_untaint($params{message});
my @opts;
- if ($message !~ /\S/) {
+ if ($params{message} !~ /\S/) {
# Force git to allow empty commit messages.
# (If this version of git supports it.)
my ($version)=`git --version` =~ /git version (.*)/;
@@ -464,13 +537,13 @@ sub rcs_commit_staged ($$$) {
push @opts, '--cleanup=verbatim';
}
else {
- $message.=".";
+ $params{message}.=".";
}
}
push @opts, '-q';
# git commit returns non-zero if file has not been really changed.
# so we should ignore its exit status (hence run_or_non).
- if (run_or_non('git', 'commit', @opts, '-m', $message)) {
+ if (run_or_non('git', 'commit', @opts, '-m', $params{message})) {
if (length $config{gitorigin_branch}) {
run_or_cry('git', 'push', $config{gitorigin_branch});
}
@@ -547,7 +620,16 @@ sub rcs_recentchanges ($) {
my $user=$ci->{'author_username'};
my $web_commit = ($ci->{'author'} =~ /\@web>/);
-
+ my $nickname;
+
+ # Set nickname only if a non-url author_username is available,
+ # and author_name is an url.
+ if ($user !~ /:\/\// && defined $ci->{'author_name'} &&
+ $ci->{'author_name'} =~ /:\/\//) {
+ $nickname=$user;
+ $user=$ci->{'author_name'};
+ }
+
# compatibility code for old web commit messages
if (! $web_commit &&
defined $messages[0] &&
@@ -560,6 +642,7 @@ sub rcs_recentchanges ($) {
push @rets, {
rev => $sha1,
user => $user,
+ nickname => $nickname,
committype => $web_commit ? "web" : "git",
when => $when,
message => [@messages],
@@ -589,17 +672,50 @@ sub rcs_diff ($) {
}
}
+{
+my %time_cache;
+
+sub findtimes ($$) {
+ my $file=shift;
+ my $id=shift; # 0 = mtime ; 1 = ctime
+
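+ # Fill the cache with a single pass over the full git log:
+ # commits are listed newest-first, each timestamp followed by
+ # the files it touched, so the first date seen for a file is
+ # its mtime and the last (oldest) date seen is its ctime.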
+ if (! keys %time_cache) {
+ my $date;
+ foreach my $line (run_or_die('git', 'log',
+ '--pretty=format:%ct',
+ '--name-only', '--relative')) {
+ if (! defined $date && $line =~ /^(\d+)$/) {
+ $date=$line;
+ }
+ elsif (! length $line) {
+ $date=undef;
+ }
+ else {
+ my $f=decode_git_file($line);
+
+ if (! $time_cache{$f}) {
+ $time_cache{$f}[0]=$date; # mtime
+ }
+ $time_cache{$f}[1]=$date; # ctime
+ }
+ }
+ }
+
+ return exists $time_cache{$file} ? $time_cache{$file}[$id] : 0;
+}
+
+}
+
sub rcs_getctime ($) {
my $file=shift;
- # Remove srcdir prefix
- $file =~ s/^\Q$config{srcdir}\E\/?//;
- my @sha1s = run_or_non('git', 'rev-list', 'HEAD', '--', $file);
- my $ci = git_commit_info($sha1s[$#sha1s], 1);
- my $ctime = $ci->{'author_epoch'};
- debug("ctime for '$file': ". localtime($ctime));
+ return findtimes($file, 1);
+}
+
+sub rcs_getmtime ($) {
+ my $file=shift;
- return $ctime;
+ return findtimes($file, 0);
}
sub rcs_receive () {
diff --git a/IkiWiki/Plugin/google.pm b/IkiWiki/Plugin/google.pm
index 1683220e7..68cde261c 100644
--- a/IkiWiki/Plugin/google.pm
+++ b/IkiWiki/Plugin/google.pm
@@ -6,8 +6,6 @@ use strict;
use IkiWiki 3.00;
use URI;
-my $host;
-
sub import {
hook(type => "getsetup", id => "google", call => \&getsetup);
hook(type => "checkconfig", id => "google", call => \&checkconfig);
@@ -19,6 +17,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1,
+ section => "web",
},
}
@@ -26,11 +25,10 @@ sub checkconfig () {
if (! length $config{url}) {
error(sprintf(gettext("Must specify %s when using the %s plugin"), "url", 'google'));
}
- my $uri=URI->new($config{url});
- if (! $uri || ! defined $uri->host) {
- error(gettext("Failed to parse url, cannot determine domain name"));
- }
- $host=$uri->host;
+
+ # This is a mass dependency, so if the search form template
+ # changes, every page is rebuilt.
+ add_depends("", "templates/googleform.tmpl");
}
my $form;
@@ -43,7 +41,8 @@ sub pagetemplate (@) {
if ($template->query(name => "searchform")) {
if (! defined $form) {
my $searchform = template("googleform.tmpl", blind_cache => 1);
- $searchform->param(sitefqdn => $host);
+ $searchform->param(url => $config{url});
+ $searchform->param(html5 => $config{html5});
$form=$searchform->output;
}
diff --git a/IkiWiki/Plugin/goto.pm b/IkiWiki/Plugin/goto.pm
index 2e2dc04a1..42d2425ca 100644
--- a/IkiWiki/Plugin/goto.pm
+++ b/IkiWiki/Plugin/goto.pm
@@ -7,6 +7,7 @@ use IkiWiki 3.00;
sub import {
hook(type => "cgi", id => 'goto', call => \&cgi);
+ hook(type => "getsetup", id => 'goto', call => \&getsetup);
}
sub getsetup () {
@@ -14,6 +15,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "web",
}
}
@@ -40,18 +42,20 @@ sub cgi_goto ($;$) {
IkiWiki::loadindex();
- # If the page is internal (like a comment), see if it has a
- # permalink. Comments do.
- if (IkiWiki::isinternal($page) &&
- defined $pagestate{$page}{meta}{permalink}) {
- IkiWiki::redirect($q, $pagestate{$page}{meta}{permalink});
+ my $link;
+ if (! IkiWiki::isinternal($page)) {
+ $link = bestlink("", $page);
+ }
+ elsif (defined $pagestate{$page}{meta}{permalink}) {
+ # Can only redirect to an internal page if it has a
+ # permalink.
+ IkiWiki::redirect($q, $pagestate{$page}{meta}{permalink});
}
-
- my $link = bestlink("", $page);
if (! length $link) {
IkiWiki::cgi_custom_failure(
- $q->header(-status => "404 Not Found"),
+ $q,
+ "404 Not Found",
IkiWiki::misctemplate(gettext("missing page"),
"<p>".
sprintf(gettext("The page %s does not exist."),
diff --git a/IkiWiki/Plugin/graphviz.pm b/IkiWiki/Plugin/graphviz.pm
index 32e994d6b..4ed8b89f1 100644
--- a/IkiWiki/Plugin/graphviz.pm
+++ b/IkiWiki/Plugin/graphviz.pm
@@ -18,6 +18,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -36,10 +37,10 @@ sub render_graph (\%) {
$src .= "}\n";
# Use the sha1 of the graphviz code as part of its filename.
- eval q{use Digest::SHA1};
+ eval q{use Digest::SHA};
error($@) if $@;
my $dest=$params{page}."/graph-".
- IkiWiki::possibly_foolish_untaint(Digest::SHA1::sha1_hex($src)).
+ IkiWiki::possibly_foolish_untaint(Digest::SHA::sha1_hex($src)).
".png";
will_render($params{page}, $dest);
@@ -70,7 +71,8 @@ sub render_graph (\%) {
writefile($dest, $config{destdir}, $png, 1);
}
else {
- # can't write the file, so embed it in a data uri
+ # in preview mode, embed the image in a data uri
+ # to avoid temp file clutter
eval q{use MIME::Base64};
error($@) if $@;
return "<img src=\"data:image/png;base64,".
@@ -78,12 +80,7 @@ sub render_graph (\%) {
}
}
- if ($params{preview}) {
- return "<img src=\"".urlto($dest, "")."\" />\n";
- }
- else {
- return "<img src=\"".urlto($dest, $params{destpage})."\" />\n";
- }
+ return "<img src=\"".urlto($dest, $params{destpage})."\" />\n";
}
sub graph (@) {
diff --git a/IkiWiki/Plugin/haiku.pm b/IkiWiki/Plugin/haiku.pm
index 5a062a276..bf23dce67 100644
--- a/IkiWiki/Plugin/haiku.pm
+++ b/IkiWiki/Plugin/haiku.pm
@@ -16,6 +16,7 @@ sub getsetup {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
diff --git a/IkiWiki/Plugin/highlight.pm b/IkiWiki/Plugin/highlight.pm
index 9bdde85ae..e517ac5c0 100644
--- a/IkiWiki/Plugin/highlight.pm
+++ b/IkiWiki/Plugin/highlight.pm
@@ -23,6 +23,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
tohighlight => {
type => "string",
@@ -79,7 +80,7 @@ my %highlighters;
# Parse highlight's config file to get extension => language mappings.
sub read_filetypes () {
- open (IN, $filetypes);
+ open (IN, $filetypes) || error("$filetypes: $!");
while (<IN>) {
chomp;
if (/^\$ext\((.*)\)=(.*)$/) {
diff --git a/IkiWiki/Plugin/hnb.pm b/IkiWiki/Plugin/hnb.pm
index bd2177a06..5157a6b93 100644
--- a/IkiWiki/Plugin/hnb.pm
+++ b/IkiWiki/Plugin/hnb.pm
@@ -23,6 +23,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
}
@@ -32,8 +33,8 @@ sub htmlize (@) {
# hnb outputs version number etc. every time to STDOUT, so
	# using files makes it easier to separate.
- my $tmpin = mkstemp( "/tmp/ikiwiki-hnbin.XXXXXXXXXX" );
- my $tmpout = mkstemp( "/tmp/ikiwiki-hnbout.XXXXXXXXXX" );
+ my ($infh, $tmpin) = mkstemp( "/tmp/ikiwiki-hnbin.XXXXXXXXXX" );
+ my ($outfh, $tmpout) = mkstemp( "/tmp/ikiwiki-hnbout.XXXXXXXXXX" );
open(TMP, '>', $tmpin) or die "Can't write to $tmpin: $!";
print TMP $params{content};
diff --git a/IkiWiki/Plugin/html.pm b/IkiWiki/Plugin/html.pm
index a7d5e8ce9..4dbae081b 100644
--- a/IkiWiki/Plugin/html.pm
+++ b/IkiWiki/Plugin/html.pm
@@ -21,6 +21,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
}
diff --git a/IkiWiki/Plugin/htmlscrubber.pm b/IkiWiki/Plugin/htmlscrubber.pm
index a249cdf7a..927792f79 100644
--- a/IkiWiki/Plugin/htmlscrubber.pm
+++ b/IkiWiki/Plugin/htmlscrubber.pm
@@ -30,9 +30,9 @@ sub import {
"msnim", "notes", "rsync", "secondlife", "skype", "ssh",
"sftp", "smb", "sms", "snews", "webcal", "ymsgr",
);
- # data is a special case. Allow data:image/*, but
- # disallow data:text/javascript and everything else.
- $safe_url_regexp=qr/^(?:(?:$uri_schemes):|data:image\/|[^:]+(?:$|\/))/i;
+ # data is a special case. Allow a few data:image/ types,
+ # but disallow data:text/javascript and everything else.
+ $safe_url_regexp=qr/^(?:(?:$uri_schemes):|data:image\/(?:png|jpeg|gif)|[^:]+(?:$|[\/\?#]))|^#/i;
}
sub getsetup () {
@@ -40,6 +40,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "core",
},
htmlscrubber_skip => {
type => "pagespec",
@@ -71,7 +72,7 @@ sub scrubber {
eval q{use HTML::Scrubber};
error($@) if $@;
# Lists based on http://feedparser.org/docs/html-sanitization.html
- # With html 5 video and audio tags added.
+ # With html5 tags added.
$_scrubber = HTML::Scrubber->new(
allow => [qw{
a abbr acronym address area b big blockquote br br/
@@ -81,7 +82,10 @@ sub scrubber {
menu ol optgroup option p p/ pre q s samp select small
span strike strong sub sup table tbody td textarea
tfoot th thead tr tt u ul var
- video audio
+
+ video audio source section nav article aside hgroup
+ header footer figure figcaption time mark canvas
+ datalist progress meter ruby rt rp details summary
}],
default => [undef, { (
map { $_ => 1 } qw{
@@ -97,13 +101,19 @@ sub scrubber {
selected shape size span start summary
tabindex target title type valign
value vspace width
- autoplay loopstart loopend end
- playcount controls
+
+ autofocus autoplay preload loopstart
+ loopend end playcount controls pubdate
+ placeholder min max step low high optimum
+ form required autocomplete novalidate pattern
+ list formenctype formmethod formnovalidate
+ formtarget reversed spellcheck open hidden
} ),
"/" => 1, # emit proper <hr /> XHTML
href => $safe_url_regexp,
src => $safe_url_regexp,
action => $safe_url_regexp,
+ formaction => $safe_url_regexp,
cite => $safe_url_regexp,
longdesc => $safe_url_regexp,
poster => $safe_url_regexp,
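
The tightened $safe_url_regexp above now whitelists only the png, jpeg and gif data: image types, and relative urls may carry a query string or fragment (a bare #fragment is also allowed). A standalone check of what the new pattern accepts, with an abbreviated scheme list (a test sketch, not part of the plugin):

	#!/usr/bin/perl
	use strict;
	use warnings;

	my $uri_schemes=join '|', map quotemeta, qw{http https ftp mailto irc};
	my $safe_url_regexp=qr/^(?:(?:$uri_schemes):|data:image\/(?:png|jpeg|gif)|[^:]+(?:$|[\/\?#]))|^#/i;

	foreach my $url (
		'http://example.com/',            # allowed: known scheme
		'data:image/png;base64,iVBORw0',  # allowed: whitelisted image type
		'data:text/javascript,alert(1)',  # rejected
		'page.html?foo=bar',              # allowed: relative url with query string
		'#fn1',                           # allowed: bare fragment
		'javascript:alert(1)',            # rejected: unknown scheme
	) {
		printf "%-33s %s\n", $url, $url =~ $safe_url_regexp ? "allowed" : "rejected";
	}
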
diff --git a/IkiWiki/Plugin/htmltidy.pm b/IkiWiki/Plugin/htmltidy.pm
index 6f3379ef4..e6d377f8a 100644
--- a/IkiWiki/Plugin/htmltidy.pm
+++ b/IkiWiki/Plugin/htmltidy.pm
@@ -46,7 +46,9 @@ sub sanitize (@) {
waitpid $pid, 0;
$SIG{PIPE}="DEFAULT";
- return "" if $sigpipe || ! defined $ret;
+ if ($sigpipe || ! defined $ret) {
+ return gettext("htmltidy failed to parse this html");
+ }
return $ret;
}
diff --git a/IkiWiki/Plugin/httpauth.pm b/IkiWiki/Plugin/httpauth.pm
index 1816c9d74..478f67446 100644
--- a/IkiWiki/Plugin/httpauth.pm
+++ b/IkiWiki/Plugin/httpauth.pm
@@ -9,6 +9,10 @@ use IkiWiki 3.00;
sub import {
hook(type => "getsetup", id => "httpauth", call => \&getsetup);
hook(type => "auth", id => "httpauth", call => \&auth);
+ hook(type => "formbuilder_setup", id => "httpauth",
+ call => \&formbuilder_setup);
+ hook(type => "canedit", id => "httpauth", call => \&canedit,
+ first => 1);
}
sub getsetup () {
@@ -16,7 +20,32 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
+ cgiauthurl => {
+ type => "string",
+ example => "http://example.com/wiki/auth/ikiwiki.cgi",
+ description => "url to redirect to when authentication is needed",
+ safe => 1,
+ rebuild => 0,
+ },
+ httpauth_pagespec => {
+ type => "pagespec",
+ example => "!*/Discussion",
+ description => "PageSpec of pages where only httpauth will be used for authentication",
+ safe => 0,
+ rebuild => 0,
+ },
+}
+
+sub redir_cgiauthurl ($;@) {
+ my $cgi=shift;
+
+ IkiWiki::redirect($cgi,
+ @_ > 1 ? IkiWiki::cgiurl(cgiurl => $config{cgiauthurl}, @_)
+ : $config{cgiauthurl}."?@_"
+ );
+ exit;
}
sub auth ($$) {
@@ -28,4 +57,52 @@ sub auth ($$) {
}
}
+sub formbuilder_setup (@) {
+ my %params=@_;
+
+ my $form=$params{form};
+ my $session=$params{session};
+ my $cgi=$params{cgi};
+ my $buttons=$params{buttons};
+
+ if ($form->title eq "signin" &&
+ ! defined $cgi->remote_user() && defined $config{cgiauthurl}) {
+ my $button_text="Login with HTTP auth";
+ push @$buttons, $button_text;
+
+ if ($form->submitted && $form->submitted eq $button_text) {
+ # bounce thru cgiauthurl and then back to
+ # the stored postsignin action
+ redir_cgiauthurl($cgi, do => "postsignin");
+ }
+ }
+}
+
+sub test_httpauth_pagespec ($) {
+ my $page=shift;
+
+ return (
+ );
+}
+
+sub canedit ($$$) {
+ my $page=shift;
+ my $cgi=shift;
+ my $session=shift;
+
+ if (! defined $cgi->remote_user() &&
+ defined $config{httpauth_pagespec} &&
+ length $config{httpauth_pagespec} &&
+ defined $config{cgiauthurl} &&
+ pagespec_match($page, $config{httpauth_pagespec})) {
+ return sub {
+ # bounce thru cgiauthurl and back to edit action
+ redir_cgiauthurl($cgi, $cgi->query_string());
+ };
+ }
+ else {
+ return undef;
+ }
+}
+
1
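
Taken together, the new options let the main CGI stay unauthenticated while a second copy of it, protected by the web server's HTTP auth, is used only when the user picks the new signin button or edits a page matched by httpauth_pagespec. A hypothetical setup-file fragment (values are just the examples from getsetup above):

	# ikiwiki.setup fragment (illustrative values)
	add_plugins => [qw{httpauth}],
	# copy of the CGI that the web server protects with HTTP auth
	cgiauthurl => 'http://example.com/wiki/auth/ikiwiki.cgi',
	# pages that may only be edited via that authenticated CGI
	httpauth_pagespec => '!*/Discussion',
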
diff --git a/IkiWiki/Plugin/img.pm b/IkiWiki/Plugin/img.pm
index 68b001671..2375ead89 100644
--- a/IkiWiki/Plugin/img.pm
+++ b/IkiWiki/Plugin/img.pm
@@ -19,6 +19,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -26,6 +27,10 @@ sub preprocess (@) {
my ($image) = $_[0] =~ /$config{wiki_file_regexp}/; # untaint
my %params=@_;
+ if (! defined $image) {
+ error("bad image filename");
+ }
+
if (exists $imgdefaults{$params{page}}) {
foreach my $key (keys %{$imgdefaults{$params{page}}}) {
if (! exists $params{$key}) {
@@ -34,7 +39,7 @@ sub preprocess (@) {
}
}
- if (! exists $params{size}) {
+ if (! exists $params{size} || ! length $params{size}) {
$params{size}='full';
}
@@ -44,6 +49,7 @@ sub preprocess (@) {
}
add_link($params{page}, $image);
+ add_depends($params{page}, $image);
# optimisation: detect scan mode, and avoid generating the image
if (! defined wantarray) {
@@ -63,46 +69,81 @@ sub preprocess (@) {
error gettext("Image::Magick is not installed") if $@;
my $im = Image::Magick->new;
my $imglink;
- my $r;
+ my $r = $im->Read($srcfile);
+ error sprintf(gettext("failed to read %s: %s"), $file, $r) if $r;
+
+ my ($dwidth, $dheight);
if ($params{size} ne 'full') {
- add_depends($params{page}, $image);
-
my ($w, $h) = ($params{size} =~ /^(\d*)x(\d*)$/);
error sprintf(gettext('wrong size format "%s" (should be WxH)'), $params{size})
unless (defined $w && defined $h &&
(length $w || length $h));
-
- my $outfile = "$config{destdir}/$dir/${w}x${h}-$base";
- $imglink = "$dir/${w}x${h}-$base";
- will_render($params{page}, $imglink);
-
- if (-e $outfile && (-M $srcfile >= -M $outfile)) {
- $r = $im->Read($outfile);
- error sprintf(gettext("failed to read %s: %s"), $outfile, $r) if $r;
+ if ((length $w && $w > $im->Get("width")) ||
+ (length $h && $h > $im->Get("height"))) {
+ # resizing larger
+ $imglink = $file;
+
+ # don't generate larger image, just set display size
+ if (length $w && length $h) {
+ ($dwidth, $dheight)=($w, $h);
+ }
+ # avoid division by zero on 0x0 image
+ elsif ($im->Get("width") == 0 || $im->Get("height") == 0) {
+ ($dwidth, $dheight)=(0, 0);
+ }
+ # calculate unspecified size from the other one, preserving
+ # aspect ratio
+ elsif (length $w) {
+ $dwidth=$w;
+ $dheight=$w / $im->Get("width") * $im->Get("height");
+ }
+ elsif (length $h) {
+ $dheight=$h;
+ $dwidth=$h / $im->Get("height") * $im->Get("width");
+ }
}
else {
- $r = $im->Read($srcfile);
- error sprintf(gettext("failed to read %s: %s"), $file, $r) if $r;
-
- $r = $im->Resize(geometry => "${w}x${h}");
- error sprintf(gettext("failed to resize: %s"), $r) if $r;
+ # resizing smaller
+ my $outfile = "$config{destdir}/$dir/${w}x${h}-$base";
+ $imglink = "$dir/${w}x${h}-$base";
+
+ will_render($params{page}, $imglink);
- # don't actually write file in preview mode
- if (! $params{preview}) {
- my @blob = $im->ImageToBlob();
- writefile($imglink, $config{destdir}, $blob[0], 1);
+ if (-e $outfile && (-M $srcfile >= -M $outfile)) {
+ $im = Image::Magick->new;
+ $r = $im->Read($outfile);
+ error sprintf(gettext("failed to read %s: %s"), $outfile, $r) if $r;
}
else {
- $imglink = $file;
+ ($dwidth, $dheight)=($w, $h);
+ $r = $im->Resize(geometry => "${w}x${h}");
+ error sprintf(gettext("failed to resize: %s"), $r) if $r;
+
+ # don't actually write resized file in preview mode;
+ # rely on width and height settings
+ if (! $params{preview}) {
+ my @blob = $im->ImageToBlob();
+ writefile($imglink, $config{destdir}, $blob[0], 1);
+ }
+ else {
+ $imglink = $file;
+ }
}
+
+ $dwidth = $im->Get("width") unless defined $dwidth;
+ $dheight = $im->Get("height") unless defined $dheight;
}
}
else {
- $r = $im->Read($srcfile);
- error sprintf(gettext("failed to read %s: %s"), $file, $r) if $r;
$imglink = $file;
+ $dwidth = $im->Get("width");
+ $dheight = $im->Get("height");
+ }
+
+ if (! defined($dwidth) || ! defined($dheight)) {
+ error sprintf(gettext("failed to determine size of image %s"), $file)
}
my ($fileurl, $imgurl);
@@ -115,35 +156,54 @@ sub preprocess (@) {
$imgurl="$config{url}/$imglink";
}
- if (! defined($im->Get("width")) || ! defined($im->Get("height"))) {
- error sprintf(gettext("failed to determine size of image %s"), $file)
+ if (exists $params{class}) {
+ $params{class}.=" img";
+ }
+ else {
+ $params{class}="img";
}
+ my $attrs='';
+ foreach my $attr (qw{alt title class id hspace vspace}) {
+ if (exists $params{$attr}) {
+ $attrs.=" $attr=\"$params{$attr}\"";
+ }
+ }
+
my $imgtag='<img src="'.$imgurl.
- '" width="'.$im->Get("width").
- '" height="'.$im->Get("height").'"'.
- (exists $params{alt} ? ' alt="'.$params{alt}.'"' : '').
- (exists $params{title} ? ' title="'.$params{title}.'"' : '').
- (exists $params{align} ? ' align="'.$params{align}.'"' : '').
- (exists $params{class} ? ' class="'.$params{class}.'"' : '').
- (exists $params{id} ? ' id="'.$params{id}.'"' : '').
+ '" width="'.$dwidth.
+ '" height="'.$dheight.'"'.
+ $attrs.
+ (exists $params{align} && ! exists $params{caption} ? ' align="'.$params{align}.'"' : '').
' />';
- if (! defined $params{link} || lc($params{link}) eq 'yes') {
- $imgtag='<a href="'.$fileurl.'">'.$imgtag.'</a>';
+ my $link;
+ if (! defined $params{link}) {
+ $link=$fileurl;
}
elsif ($params{link} =~ /^\w+:\/\//) {
- $imgtag='<a href="'.$params{link}.'">'.$imgtag.'</a>';
+ $link=$params{link};
+ }
+
+ if (defined $link) {
+ $imgtag='<a href="'.$link.'">'.$imgtag.'</a>';
}
- elsif (length bestlink($params{page}, $params{link})) {
- add_depends($params{page}, $params{link});
- $imgtag=htmllink($params{page}, $params{destpage},
- $params{link}, linktext => $imgtag,
- noimageinline => 1);
+ else {
+ my $b = bestlink($params{page}, $params{link});
+
+ if (length $b) {
+ add_depends($params{page}, $b, deptype("presence"));
+ $imgtag=htmllink($params{page}, $params{destpage},
+ $params{link}, linktext => $imgtag,
+ noimageinline => 1,
+ );
+ }
}
if (exists $params{caption}) {
- return '<table class="img">'.
+ return '<table class="img'.
+ (exists $params{align} ? " align-$params{align}" : "").
+ '">'.
'<caption>'.$params{caption}.'</caption>'.
'<tr><td>'.$imgtag.'</td></tr>'.
'</table>';
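
Note that the reworked size handling never scales an image up: when the requested size exceeds the source, only the displayed width/height are adjusted, and a missing dimension is computed from the other one so the aspect ratio is preserved. For example, a 400x300 source with size=200x comes out as width=200 height=150; a minimal sketch of that arithmetic (plain Perl, not the plugin code):

	# display-size calculation as used above, for a 400x300 source and size=200x
	my ($srcw, $srch)=(400, 300);
	my ($w, $h)=(200, '');

	my ($dwidth, $dheight);
	if (length $w && length $h)      { ($dwidth, $dheight)=($w, $h) }
	elsif ($srcw == 0 || $srch == 0) { ($dwidth, $dheight)=(0, 0) }  # avoid division by zero
	elsif (length $w)                { $dwidth=$w; $dheight=$w / $srcw * $srch }
	elsif (length $h)                { $dheight=$h; $dwidth=$h / $srch * $srcw }
	print "width=$dwidth height=$dheight\n";  # width=200 height=150
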
diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm
index e7d6f250e..715a3d652 100644
--- a/IkiWiki/Plugin/inline.pm
+++ b/IkiWiki/Plugin/inline.pm
@@ -49,6 +49,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "core",
},
rss => {
type => "boolean",
@@ -159,7 +160,7 @@ sub preprocess_inline (@) {
my $rss=(($config{rss} || $config{allowrss}) && exists $params{rss}) ? yesno($params{rss}) : $config{rss};
my $atom=(($config{atom} || $config{allowatom}) && exists $params{atom}) ? yesno($params{atom}) : $config{atom};
my $quick=exists $params{quick} ? yesno($params{quick}) : 0;
- my $feeds=exists $params{feeds} ? yesno($params{feeds}) : !$quick;
+ my $feeds=! $nested && (exists $params{feeds} ? yesno($params{feeds}) : !$quick && ! $raw);
my $emptyfeeds=exists $params{emptyfeeds} ? yesno($params{emptyfeeds}) : 1;
my $feedonly=yesno($params{feedonly});
if (! exists $params{show} && ! $archive) {
@@ -186,7 +187,6 @@ sub preprocess_inline (@) {
my @list;
if (exists $params{pagenames}) {
-
foreach my $p (qw(sort pages)) {
if (exists $params{$p}) {
error sprintf(gettext("the %s and %s parameters cannot be used together"),
@@ -194,44 +194,40 @@ sub preprocess_inline (@) {
}
}
- @list = split ' ', $params{pagenames};
- my $_;
- @list = map { bestlink($params{page}, $_) } @list;
-
- $params{pages} = join(" or ", @list);
- }
- else {
- @list = pagespec_match_list(
- [ grep { $_ ne $params{page} } keys %pagesources ],
- $params{pages}, location => $params{page});
+ @list = map { bestlink($params{page}, $_) }
+ split ' ', $params{pagenames};
- if (exists $params{sort} && $params{sort} eq 'title') {
- @list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list;
+ if (yesno($params{reverse})) {
+ @list=reverse(@list);
}
- elsif (exists $params{sort} && $params{sort} eq 'title_natural') {
- eval q{use Sort::Naturally};
- if ($@) {
- error(gettext("Sort::Naturally needed for title_natural sort"));
- }
- @list=sort { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) } @list;
+
+ foreach my $p (@list) {
+ add_depends($params{page}, $p, deptype($quick ? "presence" : "content"));
}
- elsif (exists $params{sort} && $params{sort} eq 'mtime') {
- @list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list;
+ }
+ else {
+ my $num=0;
+ if ($params{show}) {
+ $num=$params{show};
}
- elsif (! exists $params{sort} || $params{sort} eq 'age') {
- @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
+ if ($params{feedshow} && $num < $params{feedshow} && $num > 0) {
+ $num=$params{feedshow};
}
- else {
- error sprintf(gettext("unknown sort type %s"), $params{sort});
+ if ($params{skip} && $num) {
+ $num+=$params{skip};
}
- }
- if (yesno($params{reverse})) {
- @list=reverse(@list);
+ @list = pagespec_match_list($params{page}, $params{pages},
+ deptype => deptype($quick ? "presence" : "content"),
+ filter => sub { $_[0] eq $params{page} },
+ sort => exists $params{sort} ? $params{sort} : "age",
+ reverse => yesno($params{reverse}),
+ ($num ? (num => $num) : ()),
+ );
}
if (exists $params{skip}) {
- @list=@list[$params{skip} .. scalar @list - 1];
+ @list=@list[$params{skip} .. $#list];
}
my @feedlist;
@@ -249,14 +245,12 @@ sub preprocess_inline (@) {
@list=@list[0..$params{show} - 1];
}
- add_depends($params{page}, $params{pages});
- # Explicitly add all currently displayed pages as dependencies, so
- # that if they are removed or otherwise changed, the inline will be
- # sure to be updated.
- add_depends($params{page}, join(" or ", $#list >= $#feedlist ? @list : @feedlist));
-
if ($feeds && exists $params{feedpages}) {
- @feedlist=grep { pagespec_match($_, $params{feedpages}, location => $params{page}) } @feedlist;
+ @feedlist = pagespec_match_list(
+ $params{page}, "($params{pages}) and ($params{feedpages})",
+ deptype => deptype($quick ? "presence" : "content"),
+ list => \@feedlist,
+ );
}
my ($feedbase, $feednum);
@@ -305,19 +299,9 @@ sub preprocess_inline (@) {
(exists $params{postform} && yesno($params{postform}))) &&
IkiWiki->can("cgi_editpage")) {
# Add a blog post form, with feed buttons.
- my $formtemplate=template("blogpost.tmpl", blind_cache => 1);
+ my $formtemplate=template_depends("blogpost.tmpl", $params{page}, blind_cache => 1);
$formtemplate->param(cgiurl => $config{cgiurl});
- my $rootpage;
- if (exists $params{rootpage}) {
- $rootpage=bestlink($params{page}, $params{rootpage});
- if (!length $rootpage) {
- $rootpage=$params{rootpage};
- }
- }
- else {
- $rootpage=$params{page};
- }
- $formtemplate->param(rootpage => $rootpage);
+ $formtemplate->param(rootpage => rootpage(%params));
$formtemplate->param(rssurl => $rssurl) if $feeds && $rss;
$formtemplate->param(atomurl => $atomurl) if $feeds && $atom;
if (exists $params{postformtext}) {
@@ -336,25 +320,35 @@ sub preprocess_inline (@) {
}
elsif ($feeds && !$params{preview} && ($emptyfeeds || @feedlist)) {
# Add feed buttons.
- my $linktemplate=template("feedlink.tmpl", blind_cache => 1);
+ my $linktemplate=template_depends("feedlink.tmpl", $params{page}, blind_cache => 1);
$linktemplate->param(rssurl => $rssurl) if $rss;
$linktemplate->param(atomurl => $atomurl) if $atom;
$ret.=$linktemplate->output;
}
if (! $feedonly) {
- require HTML::Template;
- my @params=IkiWiki::template_params($params{template}.".tmpl", blind_cache => 1);
- if (! @params) {
- error sprintf(gettext("nonexistant template %s"), $params{template});
+ my $template;
+ if (! $raw) {
+ # cannot use wiki pages as templates; template not sanitized due to
+ # format hook hack
+ eval {
+ $template=template_depends($params{template}.".tmpl", $params{page},
+ blind_cache => 1);
+ };
+ if ($@) {
+ error gettext("failed to process template:")." $@";
+ }
+ if (! $template) {
+ error sprintf(gettext("template %s not found"), $params{template}.".tmpl");
+ }
}
- my $template=HTML::Template->new(@params) unless $raw;
+ my $needcontent=$raw || (!($archive && $quick) && $template->query(name => 'content'));
foreach my $page (@list) {
my $file = $pagesources{$page};
my $type = pagetype($file);
- if (! $raw || ($raw && ! defined $type)) {
- unless ($archive && $quick) {
+ if (! $raw) {
+ if ($needcontent) {
# Get the content before populating the
# template, since getting the content uses
# the same template if inlines are nested.
@@ -364,31 +358,34 @@ sub preprocess_inline (@) {
$template->param(pageurl => urlto($page, $params{destpage}));
$template->param(inlinepage => $page);
$template->param(title => pagetitle(basename($page)));
- $template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}));
+ $template->param(ctime => displaytime($pagectime{$page}, $params{timeformat}, 1));
$template->param(mtime => displaytime($pagemtime{$page}, $params{timeformat}));
$template->param(first => 1) if $page eq $list[0];
$template->param(last => 1) if $page eq $list[$#list];
+ $template->param(html5 => $config{html5});
if ($actions) {
my $file = $pagesources{$page};
my $type = pagetype($file);
if ($config{discussion}) {
- my $discussionlink=lc(gettext("Discussion"));
- if ($page !~ /.*\/\Q$discussionlink\E$/ &&
+ if ($page !~ /.*\/\Q$config{discussionpage}\E$/i &&
(length $config{cgiurl} ||
- exists $links{$page."/".$discussionlink})) {
+ exists $pagesources{$page."/".lc($config{discussionpage})})) {
$template->param(have_actions => 1);
$template->param(discussionlink =>
htmllink($page,
$params{destpage},
- gettext("Discussion"),
+ $config{discussionpage},
noimageinline => 1,
forcesubpage => 1));
}
}
- if (length $config{cgiurl} && defined $type) {
+ if (length $config{cgiurl} &&
+ defined $type &&
+ IkiWiki->can("cgi_editpage")) {
$template->param(have_actions => 1);
$template->param(editurl => cgiurl(do => "edit", page => $page));
+
}
}
@@ -408,6 +405,10 @@ sub preprocess_inline (@) {
filter($page, $params{destpage},
readfile(srcfile($file)))));
}
+ else {
+ $ret.="\n".
+ readfile(srcfile($file));
+ }
}
}
}
@@ -436,6 +437,8 @@ sub preprocess_inline (@) {
}
}
+ clear_inline_content_cache();
+
return $ret if $raw || $nested;
push @inline, $ret;
return "<div class=\"inline\" id=\"$#inline\"></div>\n\n";
@@ -450,43 +453,57 @@ sub pagetemplate_inline (@) {
if exists $feedlinks{$page} && $template->query(name => "feedlinks");
}
+{
+my %inline_content;
+my $cached_destpage="";
+
sub get_inline_content ($$) {
my $page=shift;
my $destpage=shift;
+ if (exists $inline_content{$page} && $cached_destpage eq $destpage) {
+ return $inline_content{$page};
+ }
+
my $file=$pagesources{$page};
my $type=pagetype($file);
+ my $ret="";
if (defined $type) {
$nested++;
- my $ret=htmlize($page, $destpage, $type,
+ $ret=htmlize($page, $destpage, $type,
linkify($page, $destpage,
preprocess($page, $destpage,
filter($page, $destpage,
readfile(srcfile($file))))));
$nested--;
- return $ret;
+ if (isinternal($page)) {
+ # make inlined text of internal pages searchable
+ run_hooks(indexhtml => sub {
+ shift->(page => $page, destpage => $page,
+ content => $ret);
+ });
+ }
}
- else {
- return "";
+
+ if ($cached_destpage ne $destpage) {
+ clear_inline_content_cache();
+ $cached_destpage=$destpage;
}
+ return $inline_content{$page}=$ret;
}
-sub date_822 ($) {
- my $time=shift;
+sub clear_inline_content_cache () {
+ %inline_content=();
+}
- my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
- POSIX::setlocale(&POSIX::LC_TIME, "C");
- my $ret=POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
- POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
- return $ret;
}
-sub date_3339 ($) {
+sub date_822 ($) {
my $time=shift;
my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
POSIX::setlocale(&POSIX::LC_TIME, "C");
- my $ret=POSIX::strftime("%Y-%m-%dT%H:%M:%SZ", gmtime($time));
+ my $ret=POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
return $ret;
}
@@ -524,7 +541,7 @@ sub genfeed ($$$$$@) {
my $url=URI->new(encode_utf8(urlto($page,"",1)));
- my $itemtemplate=template($feedtype."item.tmpl", blind_cache => 1);
+ my $itemtemplate=template_depends($feedtype."item.tmpl", $page, blind_cache => 1);
my $content="";
my $lasttime = 0;
foreach my $p (@pages) {
@@ -543,7 +560,8 @@ sub genfeed ($$$$$@) {
if (exists $pagestate{$p}) {
if (exists $pagestate{$p}{meta}{guid}) {
- $itemtemplate->param(guid => $pagestate{$p}{meta}{guid});
+ eval q{use HTML::Entities};
+ $itemtemplate->param(guid => HTML::Entities::encode_numeric($pagestate{$p}{meta}{guid}));
}
if (exists $pagestate{$p}{meta}{updated}) {
@@ -587,7 +605,7 @@ sub genfeed ($$$$$@) {
$lasttime = $pagemtime{$p} if $pagemtime{$p} > $lasttime;
}
- my $template=template($feedtype."page.tmpl", blind_cache => 1);
+ my $template=template_depends($feedtype."page.tmpl", $page, blind_cache => 1);
$template->param(
title => $page ne "index" ? pagetitle($page) : $config{wikiname},
wikiname => $config{wikiname},
@@ -655,4 +673,21 @@ sub pingurl (@) {
exit 0; # daemon done
}
+
+sub rootpage (@) {
+ my %params=@_;
+
+ my $rootpage;
+ if (exists $params{rootpage}) {
+ $rootpage=bestlink($params{page}, $params{rootpage});
+ if (!length $rootpage) {
+ $rootpage=$params{rootpage};
+ }
+ }
+ else {
+ $rootpage=$params{page};
+ }
+ return $rootpage;
+}
+
1
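
Note how the number of pages requested from pagespec_match_list is derived: it starts from show, is raised to feedshow when that is larger, and skip is added on top so the skipped entries can still be dropped afterwards. With show=5, feedshow=10 and skip=2 the call asks for 12 matches; a tiny standalone illustration of that bookkeeping (not ikiwiki code):

	my %params=(show => 5, feedshow => 10, skip => 2);
	my $num=0;
	$num=$params{show} if $params{show};
	$num=$params{feedshow} if $params{feedshow} && $num < $params{feedshow} && $num > 0;
	$num+=$params{skip} if $params{skip} && $num;
	print "$num\n";   # 12
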
diff --git a/IkiWiki/Plugin/link.pm b/IkiWiki/Plugin/link.pm
index 4c1add985..f6c3573f7 100644
--- a/IkiWiki/Plugin/link.pm
+++ b/IkiWiki/Plugin/link.pm
@@ -7,6 +7,9 @@ use IkiWiki 3.00;
my $link_regexp;
+my $email_regexp = qr/^.+@.+$/;
+my $url_regexp = qr/^(?:[^:]+:\/\/|mailto:).*/i;
+
sub import {
hook(type => "getsetup", id => "link", call => \&getsetup);
hook(type => "checkconfig", id => "link", call => \&checkconfig);
@@ -20,6 +23,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1,
+ section => "core",
},
}
@@ -56,10 +60,56 @@ sub checkconfig () {
)? # optional
\]\] # end of link
- }x,
+ }x;
}
}
+sub is_externallink ($$;$$) {
+ my $page = shift;
+ my $url = shift;
+ my $anchor = shift;
+ my $force = shift;
+
+ if (defined $anchor) {
+ $url.="#".$anchor;
+ }
+
+ if (! $force && $url =~ /$email_regexp/) {
+ # url looks like an email address, so we assume it
+ # is supposed to be an external link if there is no
+ # page with that name.
+ return (! (bestlink($page, linkpage($url))))
+ }
+
+ return ($url =~ /$url_regexp/)
+}
+
+sub externallink ($$;$) {
+ my $url = shift;
+ my $anchor = shift;
+ my $pagetitle = shift;
+
+ if (defined $anchor) {
+ $url.="#".$anchor;
+ }
+
+ # build pagetitle
+ if (! $pagetitle) {
+ $pagetitle = $url;
+ # use only the email address as title for mailto: urls
+ if ($pagetitle =~ /^mailto:.*/) {
+ $pagetitle =~ s/^mailto:([^?]+).*/$1/;
+ }
+ }
+
+ if ($url !~ /$url_regexp/) {
+ # handle email addresses (without mailto:)
+ $url = "mailto:" . $url;
+ }
+
+ return "<a href=\"$url\">$pagetitle</a>";
+}
+
sub linkify (@) {
my %params=@_;
my $page=$params{page};
@@ -68,13 +118,17 @@ sub linkify (@) {
$params{content} =~ s{(\\?)$link_regexp}{
defined $2
? ( $1
- ? "[[$2|$3".($4 ? "#$4" : "")."]]"
- : htmllink($page, $destpage, linkpage($3),
- anchor => $4, linktext => pagetitle($2)))
+ ? "[[$2|$3".(defined $4 ? "#$4" : "")."]]"
+ : is_externallink($page, $3, $4)
+ ? externallink($3, $4, $2)
+ : htmllink($page, $destpage, linkpage($3),
+ anchor => $4, linktext => pagetitle($2)))
: ( $1
- ? "[[$3".($4 ? "#$4" : "")."]]"
- : htmllink($page, $destpage, linkpage($3),
- anchor => $4))
+ ? "[[$3".(defined $4 ? "#$4" : "")."]]"
+ : is_externallink($page, $3, $4)
+ ? externallink($3, $4)
+ : htmllink($page, $destpage, linkpage($3),
+ anchor => $4))
}eg;
return $params{content};
@@ -86,7 +140,9 @@ sub scan (@) {
my $content=$params{content};
while ($content =~ /(?<!\\)$link_regexp/g) {
- add_link($page, linkpage($2));
+ if (! is_externallink($page, $2, $3, 1)) {
+ add_link($page, linkpage($2));
+ }
}
}
@@ -97,24 +153,26 @@ sub renamepage (@) {
my $new=$params{newpage};
$params{content} =~ s{(?<!\\)$link_regexp}{
- my $linktext=$2;
- my $link=$linktext;
- if (bestlink($page, linkpage($linktext)) eq $old) {
- $link=pagetitle($new, 1);
- $link=~s/ /_/g;
- if ($linktext =~ m/.*\/*?[A-Z]/) {
- # preserve leading cap of last component
- my @bits=split("/", $link);
- $link=join("/", @bits[0..$#bits-1], ucfirst($bits[$#bits]));
- }
- if (index($linktext, "/") == 0) {
- # absolute link
- $link="/$link";
+ if (! is_externallink($page, $2, $3)) {
+ my $linktext=$2;
+ my $link=$linktext;
+ if (bestlink($page, linkpage($linktext)) eq $old) {
+ $link=pagetitle($new, 1);
+ $link=~s/ /_/g;
+ if ($linktext =~ m/.*\/*?[A-Z]/) {
+ # preserve leading cap of last component
+ my @bits=split("/", $link);
+ $link=join("/", @bits[0..$#bits-1], ucfirst($bits[$#bits]));
+ }
+ if (index($linktext, "/") == 0) {
+ # absolute link
+ $link="/$link";
+ }
}
+ defined $1
+ ? ( "[[$1|$link".($3 ? "#$3" : "")."]]" )
+ : ( "[[$link". ($3 ? "#$3" : "")."]]" )
}
- defined $1
- ? ( "[[$1|$link".($3 ? "#$3" : "")."]]" )
- : ( "[[$link". ($3 ? "#$3" : "")."]]" )
}eg;
return $params{content};
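
With is_externallink()/externallink() in place, the wikilink syntax can carry urls and email addresses directly: [[Example|http://example.com/]] renders as a plain anchor, and [[joe@example.com]] becomes a mailto: link unless a page of that name exists (scan() passes the force flag so such links stay out of the link map). A small standalone check of the two patterns (the addresses are made up):

	#!/usr/bin/perl
	use strict;
	use warnings;

	my $email_regexp=qr/^.+@.+$/;
	my $url_regexp=qr/^(?:[^:]+:\/\/|mailto:).*/i;

	foreach my $target ('http://example.com/', 'mailto:joe@example.com',
	                    'joe@example.com', 'SomeWikiPage') {
		my $kind=$target =~ $url_regexp   ? "external url"
		       : $target =~ $email_regexp ? "email (external unless a page by that name exists)"
		       :                            "ordinary wikilink";
		printf "%-25s %s\n", $target, $kind;
	}
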
diff --git a/IkiWiki/Plugin/linkmap.pm b/IkiWiki/Plugin/linkmap.pm
index 941ed5f36..ac26e072e 100644
--- a/IkiWiki/Plugin/linkmap.pm
+++ b/IkiWiki/Plugin/linkmap.pm
@@ -9,7 +9,6 @@ use IPC::Open2;
sub import {
hook(type => "getsetup", id => "linkmap", call => \&getsetup);
hook(type => "preprocess", id => "linkmap", call => \&preprocess);
- hook(type => "format", id => "linkmap", call => \&format);
}
sub getsetup () {
@@ -17,50 +16,26 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
my $mapnum=0;
-my %maps;
sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
- # Needs to update whenever a page is added or removed, so
- # register a dependency.
- add_depends($params{page}, $params{pages});
-
- # Can't just return the linkmap here, since the htmlscrubber
- # scrubs out all <object> tags (with good reason!)
- # Instead, insert a placeholder tag, which will be expanded during
- # formatting.
$mapnum++;
- $maps{$mapnum}=\%params;
- return "<div class=\"linkmap$mapnum\"></div>";
-}
-
-sub format (@) {
- my %params=@_;
-
- $params{content}=~s/<div class=\"linkmap(\d+)"><\/div>/genmap($1)/eg;
-
- return $params{content};
-}
-
-sub genmap ($) {
- my $mapnum=shift;
- return "" unless exists $maps{$mapnum};
- my %params=%{$maps{$mapnum}};
+ my $connected=IkiWiki::yesno($params{connected});
# Get all the items to map.
- my %mapitems = ();
- foreach my $item (keys %links) {
- if (pagespec_match($item, $params{pages}, location => $params{page})) {
- $mapitems{$item}=urlto($item, $params{destpage});
- }
- }
+ my %mapitems = map { $_ => urlto($_, $params{destpage}) }
+ pagespec_match_list($params{page}, $params{pages},
+ # update when a page is added or removed, or its
+ # links change
+ deptype => deptype("presence", "links"));
my $dest=$params{page}."/linkmap.png";
@@ -84,24 +59,38 @@ sub genmap ($) {
print OUT "charset=\"utf-8\";\n";
print OUT "ratio=compress;\nsize=\"".($params{width}+0).", ".($params{height}+0)."\";\n"
if defined $params{width} and defined $params{height};
+ my %shown;
+ my $show=sub {
+ my $item=shift;
+ if (! $shown{$item}) {
+ print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n";
+ $shown{$item}=1;
+ }
+ };
foreach my $item (keys %mapitems) {
- print OUT "\"$item\" [shape=box,href=\"$mapitems{$item}\"];\n";
+ $show->($item) unless $connected;
foreach my $link (map { bestlink($item, $_) } @{$links{$item}}) {
- print OUT "\"$item\" -> \"$link\";\n"
- if $mapitems{$link};
+ next unless length $link and $mapitems{$link};
+ foreach my $endpoint ($item, $link) {
+ $show->($endpoint);
+ }
+ print OUT "\"$item\" -> \"$link\";\n";
}
}
print OUT "}\n";
- close OUT;
+ close OUT || error gettext("failed to run dot");
local $/=undef;
- my $ret="<object data=\"".urlto($dest, $params{destpage}).
- "\" type=\"image/png\" usemap=\"#linkmap$mapnum\">\n".
- <IN>.
- "</object>";
- close IN;
+ my $ret="<img src=\"".urlto($dest, $params{destpage}).
+ "\" alt=\"".gettext("linkmap").
+ "\" usemap=\"#linkmap$mapnum\" />\n".
+ <IN>;
+ close IN || error gettext("failed to run dot");
waitpid $pid, 0;
+ if ($?) {
+ error gettext("failed to run dot");
+ }
$SIG{PIPE}="DEFAULT";
error gettext("failed to run dot") if $sigpipe;
diff --git a/IkiWiki/Plugin/listdirectives.pm b/IkiWiki/Plugin/listdirectives.pm
index bd73f1a04..8a67f7160 100644
--- a/IkiWiki/Plugin/listdirectives.pm
+++ b/IkiWiki/Plugin/listdirectives.pm
@@ -19,6 +19,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
directive_description_dir => {
type => "string",
@@ -84,7 +85,7 @@ sub preprocess (@) {
foreach my $plugin (@pluginlist) {
$result .= '<li class="listdirectives">';
my $link=linkpage($config{directive_description_dir}."/".$plugin);
- add_depends($params{page}, $link);
+ add_depends($params{page}, $link, deptype("presence"));
$result .= htmllink($params{page}, $params{destpage}, $link);
$result .= '</li>';
}
diff --git a/IkiWiki/Plugin/localstyle.pm b/IkiWiki/Plugin/localstyle.pm
new file mode 100644
index 000000000..111f4dc30
--- /dev/null
+++ b/IkiWiki/Plugin/localstyle.pm
@@ -0,0 +1,35 @@
+#!/usr/bin/perl
+
+package IkiWiki::Plugin::localstyle;
+
+use warnings;
+use strict;
+use IkiWiki 3.00;
+
+sub import {
+ hook(type => "getsetup", id => "localstyle", call => \&getsetup);
+ hook(type => "pagetemplate", id => "localstyle", call => \&pagetemplate);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => 1,
+ },
+}
+
+sub pagetemplate (@) {
+ my %params=@_;
+
+ my $template=$params{template};
+
+ if ($template->query(name => "local_css")) {
+ my $best=bestlink($params{page}, 'local.css');
+ if ($best) {
+ $template->param(local_css => $best);
+ }
+ }
+}
+
+1
diff --git a/IkiWiki/Plugin/lockedit.pm b/IkiWiki/Plugin/lockedit.pm
index 0fa329251..5b50fd115 100644
--- a/IkiWiki/Plugin/lockedit.pm
+++ b/IkiWiki/Plugin/lockedit.pm
@@ -15,6 +15,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
locked_pages => {
type => "pagespec",
@@ -37,10 +38,11 @@ sub canedit ($$) {
if (defined $config{locked_pages} && length $config{locked_pages} &&
pagespec_match($page, $config{locked_pages},
user => $session->param("name"),
- ip => $ENV{REMOTE_ADDR},
+ ip => $session->remote_addr(),
)) {
- if (! defined $user ||
- ! IkiWiki::userinfo_get($session->param("name"), "regdate")) {
+ if ((! defined $user ||
+ ! IkiWiki::userinfo_get($session->param("name"), "regdate")) &&
+ exists $IkiWiki::hooks{auth}) {
return sub { IkiWiki::needsignin($cgi, $session) };
}
else {
diff --git a/IkiWiki/Plugin/map.pm b/IkiWiki/Plugin/map.pm
index 826dbbd66..ce3ac1d24 100644
--- a/IkiWiki/Plugin/map.pm
+++ b/IkiWiki/Plugin/map.pm
@@ -21,6 +21,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -28,12 +29,16 @@ sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
+ # Needs to update whenever a page is added or removed (or in some
+ # cases, when its content changes, if show= is specified).
+ my $deptype=deptype(exists $params{show} ? "content" : "presence");
+
my $common_prefix;
# Get all the items to map.
my %mapitems;
- foreach my $page (pagespec_match_list([keys %pagesources],
- $params{pages}, location => $params{page})) {
+ foreach my $page (pagespec_match_list($params{page}, $params{pages},
+ deptype => $deptype)) {
if (exists $params{show} &&
exists $pagestate{$page} &&
exists $pagestate{$page}{meta}{$params{show}}) {
@@ -67,14 +72,6 @@ sub preprocess (@) {
$common_prefix=IkiWiki::dirname($common_prefix);
}
- # Needs to update whenever a page is added or removed (or in some
- # cases, when its content changes, if show=title), so register a
- # dependency.
- add_depends($params{page}, $params{pages});
- # Explicitly add all currently shown pages, to detect when pages
- # are removed.
- add_depends($params{page}, join(" or ", keys %mapitems));
-
# Create the map.
my $parent="";
my $indent=0;
@@ -82,12 +79,12 @@ sub preprocess (@) {
my $addparent="";
my $map = "<div class='map'>\n";
- # Return empty div if %mapitems is empty
- if (!scalar(keys %mapitems)) {
+ if (! keys %mapitems) {
+ # return empty div for empty map
$map .= "</div>\n";
return $map;
}
- else { # continue populating $map
+ else {
$map .= "<ul>\n";
}
diff --git a/IkiWiki/Plugin/mdwn.pm b/IkiWiki/Plugin/mdwn.pm
index c62780cb8..b892eabee 100644
--- a/IkiWiki/Plugin/mdwn.pm
+++ b/IkiWiki/Plugin/mdwn.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
multimarkdown => {
type => "boolean",
@@ -43,8 +44,10 @@ sub htmlize (@) {
if ($@) {
debug(gettext("multimarkdown is enabled, but Text::MultiMarkdown is not installed"));
}
- $markdown_sub=sub {
- Text::MultiMarkdown::markdown(shift, {use_metadata => 0});
+ else {
+ $markdown_sub=sub {
+ Text::MultiMarkdown::markdown(shift, {use_metadata => 0});
+ }
}
}
if (! defined $markdown_sub) {
diff --git a/IkiWiki/Plugin/mercurial.pm b/IkiWiki/Plugin/mercurial.pm
index 11fdec529..59dc63b4e 100644
--- a/IkiWiki/Plugin/mercurial.pm
+++ b/IkiWiki/Plugin/mercurial.pm
@@ -20,6 +20,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
}
sub checkconfig () {
@@ -36,6 +37,7 @@ sub getsetup () {
plugin => {
safe => 0, # rcs plugin
rebuild => undef,
+ section => "rcs",
},
mercurial_wrapper => {
type => "string",
@@ -124,26 +126,26 @@ sub rcs_prepedit ($) {
return "";
}
-sub rcs_commit ($$$;$$) {
- my ($file, $message, $rcstoken, $user, $ipaddr) = @_;
+sub rcs_commit (@) {
+ my %params=@_;
- if (defined $user) {
- $user = IkiWiki::possibly_foolish_untaint($user);
- }
- elsif (defined $ipaddr) {
- $user = "Anonymous from ".IkiWiki::possibly_foolish_untaint($ipaddr);
- }
- else {
- $user = "Anonymous";
+ my $user="Anonymous";
+ if (defined $params{session}) {
+ if (defined $params{session}->param("name")) {
+ $user = $params{session}->param("name");
+ }
+ elsif (defined $params{session}->remote_addr()) {
+ $user = "Anonymous from ".$params{session}->remote_addr();
+ }
}
- $message = IkiWiki::possibly_foolish_untaint($message);
- if (! length $message) {
- $message = "no message given";
+ if (! length $params{message}) {
+ $params{message} = "no message given";
}
my @cmdline = ("hg", "-q", "-R", $config{srcdir}, "commit",
- "-m", $message, "-u", $user);
+ "-m", IkiWiki::possibly_foolish_untaint($params{message}),
+ "-u", IkiWiki::possibly_foolish_untaint($user));
if (system(@cmdline) != 0) {
warn "'@cmdline' failed: $!";
}
@@ -151,10 +153,10 @@ sub rcs_commit ($$$;$$) {
return undef; # success
}
-sub rcs_commit_staged ($$$) {
+sub rcs_commit_staged (@) {
# Commits all staged changes. Changes can be staged using rcs_add,
# rcs_remove, and rcs_rename.
- my ($message, $user, $ipaddr)=@_;
+ my %params=@_;
error("rcs_commit_staged not implemented for mercurial"); # TODO
}
@@ -234,15 +236,13 @@ sub rcs_diff ($) {
sub rcs_getctime ($) {
my ($file) = @_;
- # XXX filename passes through the shell here, should try to avoid
- # that just in case
my @cmdline = ("hg", "-R", $config{srcdir}, "log", "-v",
"--style", "default", "$config{srcdir}/$file");
- open (my $out, "@cmdline |");
+ open (my $out, "-|", @cmdline);
- my @log = mercurial_log($out);
+ my @log = (mercurial_log($out));
- if (length @log < 1) {
+ if (@log < 1) {
return 0;
}
@@ -253,4 +253,8 @@ sub rcs_getctime ($) {
return $ctime;
}
+sub rcs_getmtime ($) {
+ error "rcs_getmtime is not implemented for mercurial\n"; # TODO
+}
+
1
diff --git a/IkiWiki/Plugin/meta.pm b/IkiWiki/Plugin/meta.pm
index b2295923e..d18585d3d 100644
--- a/IkiWiki/Plugin/meta.pm
+++ b/IkiWiki/Plugin/meta.pm
@@ -20,6 +20,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "core",
},
}
@@ -87,15 +88,21 @@ sub preprocess (@) {
# Metadata collection that needs to happen during the scan pass.
if ($key eq 'title') {
- $pagestate{$page}{meta}{title}=HTML::Entities::encode_numeric($value);
- # fallthrough
+ $pagestate{$page}{meta}{title}=$value;
+ if (exists $params{sortas}) {
+ $pagestate{$page}{meta}{titlesort}=$params{sortas};
+ }
+ else {
+ delete $pagestate{$page}{meta}{titlesort};
+ }
+ return "";
}
elsif ($key eq 'description') {
- $pagestate{$page}{meta}{description}=HTML::Entities::encode_numeric($value);
+ $pagestate{$page}{meta}{description}=$value;
# fallthrough
}
elsif ($key eq 'guid') {
- $pagestate{$page}{meta}{guid}=HTML::Entities::encode_numeric($value);
+ $pagestate{$page}{meta}{guid}=$value;
# fallthrough
}
elsif ($key eq 'license') {
@@ -115,12 +122,22 @@ sub preprocess (@) {
}
elsif ($key eq 'author') {
$pagestate{$page}{meta}{author}=$value;
+ if (exists $params{sortas}) {
+ $pagestate{$page}{meta}{authorsort}=$params{sortas};
+ }
+ else {
+ delete $pagestate{$page}{meta}{authorsort};
+ }
		# fallthrough
}
elsif ($key eq 'authorurl') {
$pagestate{$page}{meta}{authorurl}=$value if safeurl($value);
# fallthrough
}
+ elsif ($key eq 'permalink') {
+ $pagestate{$page}{meta}{permalink}=$value if safeurl($value);
+ # fallthrough
+ }
elsif ($key eq 'date') {
eval q{use Date::Parse};
if (! $@) {
@@ -141,10 +158,9 @@ sub preprocess (@) {
return;
}
- # Metadata collection that happens only during preprocessing pass.
+ # Metadata handling that happens only during preprocessing pass.
if ($key eq 'permalink') {
if (safeurl($value)) {
- $pagestate{$page}{meta}{permalink}=$value;
push @{$metaheaders{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />', $destpage);
}
}
@@ -191,11 +207,11 @@ sub preprocess (@) {
if ($value !~ /^\w+:\/\//) {
my ($redir_page, $redir_anchor) = split /\#/, $value;
- add_depends($page, $redir_page);
my $link=bestlink($page, $redir_page);
if (! length $link) {
error gettext("redir page not found")
}
+ add_depends($page, $link, deptype("presence"));
$value=urlto($link, $page);
$value.='#'.$redir_anchor if defined $redir_anchor;
@@ -236,9 +252,21 @@ sub preprocess (@) {
push @{$metaheaders{$page}}, '<meta name="robots"'.
' content="'.encode_entities($value).'" />';
}
+ elsif ($key eq 'description') {
+ push @{$metaheaders{$page}}, '<meta name="'.
+ encode_entities($key).
+ '" content="'.encode_entities($value).'" />';
+ }
+ elsif ($key eq 'name') {
+ push @{$metaheaders{$page}}, scrub('<meta '.$key.'="'.
+ encode_entities($value).
+ join(' ', map { "$_=\"$params{$_}\"" } keys %params).
+ ' />', $destpage);
+ }
else {
- push @{$metaheaders{$page}}, scrub('<meta name="'.encode_entities($key).
- '" content="'.encode_entities($value).'" />', $destpage);
+ push @{$metaheaders{$page}}, scrub('<meta name="'.
+ encode_entities($key).'" content="'.
+ encode_entities($value).'" />', $destpage);
}
return "";
@@ -256,7 +284,7 @@ sub pagetemplate (@) {
$template->param(meta => join("\n", grep { (! $seen{$_}) && ($seen{$_}=1) } @{$metaheaders{$page}}));
}
if (exists $pagestate{$page}{meta}{title} && $template->query(name => "title")) {
- $template->param(title => $pagestate{$page}{meta}{title});
+ $template->param(title => HTML::Entities::encode_numeric($pagestate{$page}{meta}{title}));
$template->param(title_overridden => 1);
}
@@ -265,6 +293,11 @@ sub pagetemplate (@) {
if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field);
}
+ foreach my $field (qw{description}) {
+ $template->param($field => HTML::Entities::encode_numeric($pagestate{$page}{meta}{$field}))
+ if exists $pagestate{$page}{meta}{$field} && $template->query(name => $field);
+ }
+
foreach my $field (qw{license copyright}) {
if (exists $pagestate{$page}{meta}{$field} && $template->query(name => $field) &&
($page eq $destpage || ! exists $pagestate{$destpage}{meta}{$field} ||
@@ -274,6 +307,33 @@ sub pagetemplate (@) {
}
}
+sub get_sort_key {
+ my $page = shift;
+ my $meta = shift;
+
+ # e.g. titlesort (also makes sense for author)
+ my $key = $pagestate{$page}{meta}{$meta . "sort"};
+ return $key if defined $key;
+
+ # e.g. title
+ $key = $pagestate{$page}{meta}{$meta};
+ return $key if defined $key;
+
+ # fall back to closer-to-core things
+ if ($meta eq 'title') {
+ return pagetitle(IkiWiki::basename($page));
+ }
+ elsif ($meta eq 'date') {
+ return $IkiWiki::pagectime{$page};
+ }
+ elsif ($meta eq 'updated') {
+ return $IkiWiki::pagemtime{$page};
+ }
+ else {
+ return '';
+ }
+}
+
sub match {
my $field=shift;
my $page=shift;
@@ -291,21 +351,21 @@ sub match {
if (defined $val) {
if ($val=~/^$re$/i) {
- return IkiWiki::SuccessReason->new("$re matches $field of $page");
+ return IkiWiki::SuccessReason->new("$re matches $field of $page", $page => $IkiWiki::DEPEND_CONTENT, "" => 1);
}
else {
- return IkiWiki::FailReason->new("$re does not match $field of $page");
+ return IkiWiki::FailReason->new("$re does not match $field of $page", $page => $IkiWiki::DEPEND_CONTENT, "" => 1);
}
}
else {
- return IkiWiki::FailReason->new("$page does not have a $field");
+ return IkiWiki::FailReason->new("$page does not have a $field", $page => $IkiWiki::DEPEND_CONTENT);
}
}
package IkiWiki::PageSpec;
sub match_title ($$;@) {
- IkiWiki::Plugin::meta::match("title", @_);
+ IkiWiki::Plugin::meta::match("title", @_);
}
sub match_author ($$;@) {
@@ -324,4 +384,31 @@ sub match_copyright ($$;@) {
IkiWiki::Plugin::meta::match("copyright", @_);
}
+sub match_guid ($$;@) {
+ IkiWiki::Plugin::meta::match("guid", @_);
+}
+
+package IkiWiki::SortSpec;
+
+sub cmp_meta {
+ my $meta = shift;
+ error(gettext("sort=meta requires a parameter")) unless defined $meta;
+
+ if ($meta eq 'updated' || $meta eq 'date') {
+ return IkiWiki::Plugin::meta::get_sort_key($a, $meta)
+ <=>
+ IkiWiki::Plugin::meta::get_sort_key($b, $meta);
+ }
+
+ return IkiWiki::Plugin::meta::get_sort_key($a, $meta)
+ cmp
+ IkiWiki::Plugin::meta::get_sort_key($b, $meta);
+}
+
+# A prototype of how sort=title could behave in 4.0 or something
+sub cmp_meta_title {
+ $_[0] = 'title';
+ return cmp_meta(@_);
+}
+
1
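
Together with the new IkiWiki::SortSpec::cmp_meta, the sortas parameter lets a page be filed under a different sort key than its displayed title or author, and any meta field can drive sorting. Hypothetical usage (page names and field values made up): a page might declare

	[[!meta title="The Art of Unix Programming" sortas="Art of Unix Programming, The"]]
	[[!meta author="Eric S. Raymond" sortas="Raymond, Eric S."]]

and an index could then use something like [[!inline pages="books/*" sort="meta(title)"]]; get_sort_key() above falls back from the sortas value to the plain meta field, and finally to the page name (or to ctime/mtime for date and updated).
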
diff --git a/IkiWiki/Plugin/mirrorlist.pm b/IkiWiki/Plugin/mirrorlist.pm
index 737dcf767..f54d94ad5 100644
--- a/IkiWiki/Plugin/mirrorlist.pm
+++ b/IkiWiki/Plugin/mirrorlist.pm
@@ -15,6 +15,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1,
+ section => "web",
},
mirrorlist => {
type => "string",
@@ -29,7 +30,8 @@ sub pagetemplate (@) {
my %params=@_;
my $template=$params{template};
- if ($template->query(name => "extrafooter")) {
+ if ($template->query(name => "extrafooter") &&
+ keys %{$config{mirrorlist}} > 0) {
my $value=$template->param("extrafooter");
$value.=mirrorlist($params{page});
$template->param(extrafooter => $value);
@@ -38,7 +40,7 @@ sub pagetemplate (@) {
sub mirrorlist ($) {
my $page=shift;
- return "<p>".
+ return ($config{html5} ? '<nav id="mirrorlist">' : '<div>').
(keys %{$config{mirrorlist}} > 1 ? gettext("Mirrors") : gettext("Mirror")).
": ".
join(", ",
@@ -48,7 +50,7 @@ sub mirrorlist ($) {
qq{">$_</a>}
} keys %{$config{mirrorlist}}
).
- "</p>";
+ ($config{html5} ? '</nav>' : '</div>');
}
1
diff --git a/IkiWiki/Plugin/moderatedcomments.pm b/IkiWiki/Plugin/moderatedcomments.pm
new file mode 100644
index 000000000..5957833fc
--- /dev/null
+++ b/IkiWiki/Plugin/moderatedcomments.pm
@@ -0,0 +1,64 @@
+#!/usr/bin/perl
+package IkiWiki::Plugin::moderatedcomments;
+
+use warnings;
+use strict;
+use IkiWiki 3.00;
+
+sub import {
+ hook(type => "getsetup", id => "moderatedcomments", call => \&getsetup);
+ hook(type => "checkcontent", id => "moderatedcomments", call => \&checkcontent);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => 0,
+ section => "auth",
+ },
+ moderate_pagespec => {
+ type => 'pagespec',
+ example => '*',
+ description => 'PageSpec matching users or comment locations to moderate',
+ link => 'ikiwiki/PageSpec',
+ safe => 1,
+ rebuild => 0,
+ },
+}
+
+sub checkcontent (@) {
+ my %params=@_;
+
+ # only handle comments
+ return undef unless pagespec_match($params{page}, "postcomment(*)",
+ location => $params{page});
+
+	# backwards compatibility
+ if (exists $config{moderate_users} &&
+ ! exists $config{moderate_pagespec}) {
+ $config{moderate_pagespec} = $config{moderate_users}
+ ? "!admin()"
+ : "!user(*)";
+ }
+
+ # default is to moderate all except admins
+ if (! exists $config{moderate_pagespec}) {
+ $config{moderate_pagespec}="!admin()";
+ }
+
+ my $session=$params{session};
+ my $user=$session->param("name");
+ if (pagespec_match($params{page}, $config{moderate_pagespec},
+ location => $params{page},
+ (defined $user ? (user => $user) : ()),
+ (defined $session->remote_addr() ? (ip => $session->remote_addr()) : ()),
+ )) {
+ return gettext("comment needs moderation");
+ }
+ else {
+ return undef;
+ }
+}
+
+1
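
The moderate_pagespec knob generalises the old moderate_users boolean (still honoured for backwards compatibility): by default every comment from a non-admin is held for moderation, but the pagespec can be loosened, for instance to trust logged-in users and only hold anonymous comments. A hypothetical setup fragment:

	# ikiwiki.setup fragment (illustrative)
	add_plugins => [qw{comments moderatedcomments}],
	# hold comments from anyone not logged in; registered users and
	# admins are posted immediately
	moderate_pagespec => '!user(*)',
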
diff --git a/IkiWiki/Plugin/monotone.pm b/IkiWiki/Plugin/monotone.pm
index bdb564a71..95fbcee76 100644
--- a/IkiWiki/Plugin/monotone.pm
+++ b/IkiWiki/Plugin/monotone.pm
@@ -23,6 +23,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
}
sub checkconfig () {
@@ -68,6 +69,7 @@ sub getsetup () {
plugin => {
safe => 0, # rcs plugin
rebuild => undef,
+ section => "rcs",
},
mtn_wrapper => {
type => "string",
@@ -228,7 +230,7 @@ sub read_certs ($$) {
my @ret;
my $line = $results[0];
- while ($line =~ m/\s+key\s"(.*?)"\nsignature\s"(ok|bad|unknown)"\n\s+name\s"(.*?)"\n\s+value\s"(.*?)"\n\s+trust\s"(trusted|untrusted)"\n/sg) {
+ while ($line =~ m/\s+key\s["\[](.*?)[\]"]\nsignature\s"(ok|bad|unknown)"\n\s+name\s"(.*?)"\n\s+value\s"(.*?)"\n\s+trust\s"(trusted|untrusted)"\n/sg) {
push @ret, {
key => $1,
signature => $2,
@@ -291,31 +293,33 @@ sub rcs_prepedit ($) {
return get_rev();
}
-sub rcs_commit ($$$;$$) {
+sub commitauthor (@) {
+ my %params=@_;
+
+ if (defined $params{session}) {
+ if (defined $params{session}->param("name")) {
+ return "Web user: " . $params{session}->param("name");
+ }
+ elsif (defined $params{session}->remote_addr()) {
+ return "Web IP: " . $params{session}->remote_addr();
+ }
+ }
+ return "Web: Anonymous";
+}
+
+
+sub rcs_commit (@) {
# Tries to commit the page; returns undef on _success_ and
# a version of the page with the rcs's conflict markers on failure.
# The file is relative to the srcdir.
- my $file=shift;
- my $message=shift;
- my $rcstoken=shift;
- my $user=shift;
- my $ipaddr=shift;
- my $author;
+ my %params=@_;
- if (defined $user) {
- $author="Web user: " . $user;
- }
- elsif (defined $ipaddr) {
- $author="Web IP: " . $ipaddr;
- }
- else {
- $author="Web: Anonymous";
- }
+	my $author=IkiWiki::possibly_foolish_untaint(commitauthor(%params));
chdir $config{srcdir}
or error("Cannot chdir to $config{srcdir}: $!");
- my ($oldrev)= $rcstoken=~ m/^($sha1_pattern)$/; # untaint
+ my ($oldrev) = $params{token} =~ m/^($sha1_pattern)$/; # untaint
my $rev = get_rev();
if (defined $rev && defined $oldrev && $rev ne $oldrev) {
my $automator = Monotone->new();
@@ -324,8 +328,8 @@ sub rcs_commit ($$$;$$) {
# Something has been committed, has this file changed?
my ($out, $err);
$automator->setOpts("r", $oldrev, "r", $rev);
- ($out, $err) = $automator->call("content_diff", $file);
- debug("Problem committing $file") if ($err ne "");
+ ($out, $err) = $automator->call("content_diff", $params{file});
+ debug("Problem committing $params{file}") if ($err ne "");
my $diff = $out;
if ($diff) {
@@ -334,11 +338,11 @@ sub rcs_commit ($$$;$$) {
#
# first get the contents
debug("File changed: forming branch");
- my $newfile=readfile("$config{srcdir}/$file");
+ my $newfile=readfile("$config{srcdir}/$params{file}");
# then get the old content ID from the diff
- if ($diff !~ m/^---\s$file\s+($sha1_pattern)$/m) {
- error("Unable to find previous file ID for $file");
+ if ($diff !~ m/^---\s$params{file}\s+($sha1_pattern)$/m) {
+ error("Unable to find previous file ID for $params{file}");
}
my $oldFileID = $1;
@@ -349,13 +353,13 @@ sub rcs_commit ($$$;$$) {
my $branch = $1;
# then put the new content into the DB (and record the new content ID)
- my $newRevID = commit_file_to_new_rev($automator, $file, $oldFileID, $newfile, $oldrev, $branch, $author, $message);
+ my $newRevID = commit_file_to_new_rev($automator, $params{file}, $oldFileID, $newfile, $oldrev, $branch, $author, $params{message});
$automator->close();
# if we made it to here then the file has been committed... revert the local copy
- if (system("mtn", "--root=$config{mtnrootdir}", "revert", $file) != 0) {
- debug("Unable to revert $file after merge on conflicted commit!");
+ if (system("mtn", "--root=$config{mtnrootdir}", "revert", $params{file}) != 0) {
+ debug("Unable to revert $params{file} after merge on conflicted commit!");
}
debug("Divergence created! Attempting auto-merge.");
@@ -404,7 +408,7 @@ sub rcs_commit ($$$;$$) {
# for cleanup note, this relies on the fact
# that ikiwiki seems to call rcs_prepedit()
# again after we return
- return readfile("$config{srcdir}/$file");
+ return readfile("$config{srcdir}/$params{file}");
}
return undef;
}
@@ -416,11 +420,12 @@ sub rcs_commit ($$$;$$) {
if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet",
"--author", $author, "--key", $config{mtnkey}, "-m",
- IkiWiki::possibly_foolish_untaint($message), $file) != 0) {
+ IkiWiki::possibly_foolish_untaint($params{message}),
+ $params{file}) != 0) {
debug("Traditional commit failed! Returning data as conflict.");
- my $conflict=readfile("$config{srcdir}/$file");
+ my $conflict=readfile("$config{srcdir}/$params{file}");
if (system("mtn", "--root=$config{mtnrootdir}", "revert",
- "--quiet", $file) != 0) {
+ "--quiet", $params{file}) != 0) {
debug("monotone revert failed");
}
return $conflict;
@@ -436,32 +441,21 @@ sub rcs_commit ($$$;$$) {
return undef # success
}
-sub rcs_commit_staged ($$$) {
+sub rcs_commit_staged (@) {
# Commits all staged changes. Changes can be staged using rcs_add,
# rcs_remove, and rcs_rename.
- my ($message, $user, $ipaddr)=@_;
-
+ my %params=@_;
+
# Note - this will also commit any spurious changes that happen to be
# lying around in the working copy. There shouldn't be any, but...
chdir $config{srcdir}
or error("Cannot chdir to $config{srcdir}: $!");
- my $author;
-
- if (defined $user) {
- $author="Web user: " . $user;
- }
- elsif (defined $ipaddr) {
- $author="Web IP: " . $ipaddr;
- }
- else {
- $author="Web: Anonymous";
- }
-
if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet",
- "--author", $author, "--key", $config{mtnkey}, "-m",
- IkiWiki::possibly_foolish_untaint($message)) != 0) {
+ "--author", IkiWiki::possibly_foolish_untaint(commitauthor(%params)),
+ "--key", $config{mtnkey}, "-m",
+ IkiWiki::possibly_foolish_untaint($params{message})) != 0) {
error("Monotone commit failed");
}
}
@@ -558,7 +552,8 @@ sub rcs_recentchanges ($) {
# from the changelog
if ($cert->{key} eq $config{mtnkey}) {
$committype = "web";
- } else {
+ }
+ else {
$committype = "mtn";
}
} elsif ($cert->{name} eq "date") {
@@ -575,13 +570,12 @@ sub rcs_recentchanges ($) {
}
my @changed_files = get_changed_files($automator, $rev);
- my $file;
my ($out, $err) = $automator->call("parents", $rev);
my @parents = ($out =~ m/^($sha1_pattern)$/);
my $parent = $parents[0];
- foreach $file (@changed_files) {
+ foreach my $file (@changed_files) {
next unless length $file;
if (defined $config{diffurl} and (@parents == 1)) {
@@ -692,4 +686,8 @@ sub rcs_getctime ($) {
return $date;
}
+sub rcs_getmtime ($) {
+ error "rcs_getmtime is not implemented for monotone\n"; # TODO
+}
+
1
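
The monotone backend above now receives its commit arguments as a named-parameter hash rather than a positional list. A minimal sketch of the new calling convention, mirroring the call sites updated later in this merge (poll.pm, remove.pm, rename.pm); the concrete values are illustrative only:

    # Commit a single file, as the poll plugin now does:
    IkiWiki::rcs_commit(
            file    => $pagesources{$page},
            message => "poll vote ($choice)",
            token   => IkiWiki::rcs_prepedit($pagesources{$page}),
            session => $session,
    );

    # Commit whatever was staged with rcs_add/rcs_remove/rcs_rename:
    IkiWiki::rcs_commit_staged(
            message => gettext("removed"),
            session => $session,
    );
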
diff --git a/IkiWiki/Plugin/more.pm b/IkiWiki/Plugin/more.pm
index 77d5fb077..80e339a1b 100644
--- a/IkiWiki/Plugin/more.pm
+++ b/IkiWiki/Plugin/more.pm
@@ -17,6 +17,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -32,9 +33,9 @@ sub preprocess (@) {
anchor => "more");
}
else {
- $params{text}=IkiWiki::preprocess($params{page}, $params{destpage},
- IkiWiki::filter($params{page}, $params{destpage}, $params{text}));
- return "<a name=\"more\"></a>\n\n".$params{text};
+ return "<a name=\"more\"></a>\n\n".
+ IkiWiki::preprocess($params{page}, $params{destpage},
+ $params{text});
}
}
diff --git a/IkiWiki/Plugin/norcs.pm b/IkiWiki/Plugin/norcs.pm
index bfe84c0e1..a3bb6240e 100644
--- a/IkiWiki/Plugin/norcs.pm
+++ b/IkiWiki/Plugin/norcs.pm
@@ -18,6 +18,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
}
sub getsetup () {
@@ -25,6 +26,7 @@ sub getsetup () {
plugin => {
safe => 0, # rcs plugin
rebuild => 0,
+ section => "rcs",
},
}
@@ -36,13 +38,11 @@ sub rcs_prepedit ($) {
return ""
}
-sub rcs_commit ($$$;$$) {
- my ($file, $message, $rcstoken, $user, $ipaddr) = @_;
+sub rcs_commit (@) {
return undef # success
}
-sub rcs_commit_staged ($$$) {
- my ($message, $user, $ipaddr)=@_;
+sub rcs_commit_staged (@) {
return undef # success
}
@@ -62,7 +62,11 @@ sub rcs_diff ($) {
}
sub rcs_getctime ($) {
- error gettext("getctime not implemented");
+ return 0;
+}
+
+sub rcs_getmtime ($) {
+ return 0;
}
1
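
Both backends in this part of the merge also register the new rcs_getmtime hook: monotone stubs it with an error for now, norcs answers with 0. A sketch of the minimal registration a backend without per-file commit times might provide, modeled on the norcs stubs above:

    hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);

    sub rcs_getmtime ($) {
            my $file=shift;   # source file whose last-commit time is wanted
            return 0;         # this backend has no usable modification-time data
    }
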
diff --git a/IkiWiki/Plugin/opendiscussion.pm b/IkiWiki/Plugin/opendiscussion.pm
index 60b193eca..2805f60ef 100644
--- a/IkiWiki/Plugin/opendiscussion.pm
+++ b/IkiWiki/Plugin/opendiscussion.pm
@@ -7,7 +7,8 @@ use IkiWiki 3.00;
sub import {
hook(type => "getsetup", id => "opendiscussion", call => \&getsetup);
- hook(type => "canedit", id => "opendiscussion", call => \&canedit);
+ hook(type => "canedit", id => "opendiscussion", call => \&canedit,
+ first => 1);
}
sub getsetup () {
@@ -15,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
}
@@ -23,8 +25,8 @@ sub canedit ($$) {
my $cgi=shift;
my $session=shift;
- my $discussion=lc(gettext("Discussion"));
- return "" if $page=~/(\/|^)\Q$discussion\E$/;
+ return "" if $page=~/(\/|^)\Q$config{discussionpage}\E$/i;
+ return "" if pagespec_match($page, "postcomment(*)");
return undef;
}
diff --git a/IkiWiki/Plugin/openid.pm b/IkiWiki/Plugin/openid.pm
index dc0e0f48e..fae9fb77f 100644
--- a/IkiWiki/Plugin/openid.pm
+++ b/IkiWiki/Plugin/openid.pm
@@ -7,18 +7,30 @@ use strict;
use IkiWiki 3.00;
sub import {
- hook(type => "getopt", id => "openid", call => \&getopt);
+ add_underlay("openid-selector");
+ hook(type => "checkconfig", id => "openid", call => \&checkconfig);
hook(type => "getsetup", id => "openid", call => \&getsetup);
hook(type => "auth", id => "openid", call => \&auth);
hook(type => "formbuilder_setup", id => "openid",
call => \&formbuilder_setup, last => 1);
}
-sub getopt () {
- eval q{use Getopt::Long};
- error($@) if $@;
- Getopt::Long::Configure('pass_through');
- GetOptions("openidsignup=s" => \$config{openidsignup});
+sub checkconfig () {
+ if ($config{cgi}) {
+ # Intercept normal signin form, so the openid selector
+ # can be displayed.
+ #
+ # When other auth hooks are registered, give the selector
+ # a reference to the normal signin form.
+ require IkiWiki::CGI;
+ my $real_cgi_signin;
+ if (keys %{$IkiWiki::hooks{auth}} > 1) {
+ $real_cgi_signin=\&IkiWiki::cgi_signin;
+ }
+ inject(name => "IkiWiki::cgi_signin", call => sub ($$) {
+ openid_selector($real_cgi_signin, @_);
+ });
+ }
}
sub getsetup () {
@@ -26,16 +38,56 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
- openidsignup => {
+ openid_realm => {
type => "string",
- example => "http://myopenid.com/",
- description => "an url where users can signup for an OpenID",
- safe => 1,
+ description => "url pattern of openid realm (default is cgiurl)",
+ safe => 0,
+ rebuild => 0,
+ },
+ openid_cgiurl => {
+ type => "string",
+ description => "url to ikiwiki cgi to use for openid authentication (default is cgiurl)",
+ safe => 0,
rebuild => 0,
},
}
+sub openid_selector {
+ my $real_cgi_signin=shift;
+ my $q=shift;
+ my $session=shift;
+
+ my $openid_url=$q->param('openid_identifier');
+ my $openid_error;
+
+ if (! load_openid_module()) {
+ if ($real_cgi_signin) {
+ $real_cgi_signin->($q, $session);
+ exit;
+ }
+ error(sprintf(gettext("failed to load openid module: "), @_));
+ }
+ elsif (defined $q->param("action") && $q->param("action") eq "verify") {
+ validate($q, $session, $openid_url, sub {
+ $openid_error=shift;
+ });
+ }
+
+ my $template=IkiWiki::template("openid-selector.tmpl");
+ $template->param(
+ cgiurl => $config{cgiurl},
+ (defined $openid_error ? (openid_error => $openid_error) : ()),
+ (defined $openid_url ? (openid_url => $openid_url) : ()),
+ ($real_cgi_signin ? (nonopenidform => $real_cgi_signin->($q, $session, 1)) : ()),
+ );
+
+ IkiWiki::printheader($session);
+ print IkiWiki::misctemplate("signin", $template->output);
+ exit;
+}
+
sub formbuilder_setup (@) {
my %params=@_;
@@ -43,52 +95,14 @@ sub formbuilder_setup (@) {
my $session=$params{session};
my $cgi=$params{cgi};
- if ($form->title eq "signin") {
- # Give up if module is unavailable to avoid
- # needing to depend on it.
- eval q{use Net::OpenID::Consumer};
- if ($@) {
- debug("unable to load Net::OpenID::Consumer, not enabling OpenID login ($@)");
- return;
- }
-
- # This avoids it displaying a redundant label for the
- # OpenID fieldset.
- $form->fieldsets("OpenID");
-
- $form->field(
- name => "openid_url",
- label => gettext("Log in with")." ".htmllink("", "", "ikiwiki/OpenID", noimageinline => 1),
- fieldset => "OpenID",
- size => 30,
- comment => ($config{openidsignup} ? " | <a href=\"$config{openidsignup}\">".gettext("Get an OpenID")."</a>" : "")
- );
-
- # Handle submission of an OpenID as validation.
- if ($form->submitted && $form->submitted eq "Login" &&
- defined $form->field("openid_url") &&
- length $form->field("openid_url")) {
- $form->field(
- name => "openid_url",
- validate => sub {
- validate($cgi, $session, shift, $form);
- },
- );
- # Skip all other required fields in this case.
- foreach my $field ($form->field) {
- next if $field eq "openid_url";
- $form->field(name => $field, required => 0,
- validate => '/.*/');
- }
- }
- }
- elsif ($form->title eq "preferences") {
- if (! defined $form->field(name => "name")) {
- $form->field(name => "OpenID", disabled => 1,
- value => $session->param("name"),
- size => 50, force => 1,
- fieldset => "login");
- }
+ if ($form->title eq "preferences" &&
+ IkiWiki::openiduser($session->param("name"))) {
+ $form->field(name => "openid_identifier", disabled => 1,
+ label => htmllink("", "", "ikiwiki/OpenID", noimageinline => 1),
+ value => $session->param("name"),
+ size => length($session->param("name")), force => 1,
+ fieldset => "login");
+ $form->field(name => "email", type => "hidden");
}
}
@@ -96,15 +110,14 @@ sub validate ($$$;$) {
my $q=shift;
my $session=shift;
my $openid_url=shift;
- my $form=shift;
+ my $errhandler=shift;
my $csr=getobj($q, $session);
my $claimed_identity = $csr->claimed_identity($openid_url);
if (! $claimed_identity) {
- if ($form) {
- # Put the error in the form and fail validation.
- $form->field(name => "openid_url", comment => $csr->err);
+ if ($errhandler) {
+ $errhandler->($csr->err);
return 0;
}
else {
@@ -112,9 +125,37 @@ sub validate ($$$;$) {
}
}
+ # Ask for client to provide a name and email, if possible.
+ # Try sreg and ax
+ if ($claimed_identity->can("set_extension_args")) {
+ $claimed_identity->set_extension_args(
+ 'http://openid.net/extensions/sreg/1.1',
+ {
+ optional => 'email,fullname,nickname',
+ },
+ );
+ $claimed_identity->set_extension_args(
+ 'http://openid.net/srv/ax/1.0',
+ {
+ mode => 'fetch_request',
+ 'required' => 'email,fullname,nickname,firstname',
+ 'type.email' => "http://schema.openid.net/contact/email",
+ 'type.fullname' => "http://axschema.org/namePerson",
+ 'type.nickname' => "http://axschema.org/namePerson/friendly",
+ 'type.firstname' => "http://axschema.org/namePerson/first",
+ },
+ );
+ }
+
+ my $cgiurl=$config{openid_cgiurl};
+ $cgiurl=$config{cgiurl} if ! defined $cgiurl;
+
+ my $trust_root=$config{openid_realm};
+ $trust_root=$cgiurl if ! defined $trust_root;
+
my $check_url = $claimed_identity->check_url(
- return_to => IkiWiki::cgiurl(do => "postsignin"),
- trust_root => $config{cgiurl},
+ return_to => "$cgiurl?do=postsignin",
+ trust_root => $trust_root,
delayed_return => 1,
);
# Redirect the user to the OpenID server, which will
@@ -138,6 +179,41 @@ sub auth ($$) {
}
elsif (my $vident = $csr->verified_identity) {
$session->param(name => $vident->url);
+
+ my @extensions;
+ if ($vident->can("signed_extension_fields")) {
+ @extensions=grep { defined } (
+ $vident->signed_extension_fields('http://openid.net/extensions/sreg/1.1'),
+ $vident->signed_extension_fields('http://openid.net/srv/ax/1.0'),
+ );
+ }
+ my $nickname;
+ foreach my $ext (@extensions) {
+ foreach my $field (qw{value.email email}) {
+ if (exists $ext->{$field} &&
+ defined $ext->{$field} &&
+ length $ext->{$field}) {
+ $session->param(email => $ext->{$field});
+ if (! defined $nickname &&
+ $ext->{$field}=~/(.+)@.+/) {
+ $nickname = $1;
+ }
+ last;
+ }
+ }
+ foreach my $field (qw{value.nickname nickname value.fullname fullname value.firstname}) {
+ if (exists $ext->{$field} &&
+ defined $ext->{$field} &&
+ length $ext->{$field}) {
+ $nickname=$ext->{$field};
+ last;
+ }
+ }
+ }
+ if (defined $nickname) {
+ $session->param(nickname =>
+ Encode::decode_utf8($nickname));
+ }
}
else {
error("OpenID failure: ".$csr->err);
@@ -171,13 +247,26 @@ sub getobj ($$) {
$secret=rand;
$session->param(openid_secret => $secret);
}
+
+ my $cgiurl=$config{openid_cgiurl};
+ $cgiurl=$config{cgiurl} if ! defined $cgiurl;
return Net::OpenID::Consumer->new(
ua => $ua,
args => $q,
consumer_secret => sub { return shift()+$secret },
- required_root => $config{cgiurl},
+ required_root => $cgiurl,
);
}
+sub load_openid_module {
+ # Give up if module is unavailable to avoid needing to depend on it.
+ eval q{use Net::OpenID::Consumer};
+ if ($@) {
+ debug("unable to load Net::OpenID::Consumer, not enabling OpenID login ($@)");
+ return;
+ }
+ return 1;
+}
+
1
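
Besides the selector, the rewritten auth hook asks the provider for an e-mail address and nickname (via the sreg and ax extensions) and stores whatever comes back in the CGI session. A small sketch of reading those values back elsewhere, assuming the same $session object the plugins already pass around:

    my $openid   = $session->param("name");      # the verified identity URL
    my $email    = $session->param("email");     # only present if the provider sent one
    my $nickname = $session->param("nickname");  # UTF-8 decoded; may be derived from the e-mail
    debug("signed in as ".(defined $nickname ? $nickname : $openid));
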
diff --git a/IkiWiki/Plugin/orphans.pm b/IkiWiki/Plugin/orphans.pm
index 7c938ef74..e3cc3c940 100644
--- a/IkiWiki/Plugin/orphans.pm
+++ b/IkiWiki/Plugin/orphans.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -23,31 +24,34 @@ sub preprocess (@) {
my %params=@_;
$params{pages}="*" unless defined $params{pages};
- # Needs to update whenever a page is added or removed, so
- # register a dependency.
- add_depends($params{page}, $params{pages});
+ # Needs to update whenever a link changes, on any page
+ # since any page could link to one of the pages we're
+ # considering as orphans.
+ add_depends($params{page}, "*", deptype("links"));
- my %linkedto;
- foreach my $p (keys %links) {
- map { $linkedto{bestlink($p, $_)}=1 if length $_ }
- @{$links{$p}};
- }
-
- my @orphans;
- my $discussion=lc(gettext("Discussion"));
- foreach my $page (pagespec_match_list(
- [ grep { ! $linkedto{$_} && $_ ne 'index' }
- keys %pagesources ],
- $params{pages}, location => $params{page})) {
- # If the page has a link to some other page, it's
- # indirectly linked to a page via that page's backlinks.
- next if grep {
- length $_ &&
- ($_ !~ /\/\Q$discussion\E$/i || ! $config{discussion}) &&
- bestlink($page, $_) !~ /^(\Q$page\E|)$/
- } @{$links{$page}};
- push @orphans, $page;
- }
+ my @orphans=pagespec_match_list($params{page}, $params{pages},
+ # update when orphans are added/removed
+ deptype => deptype("presence"),
+ filter => sub {
+ my $page=shift;
+
+ # Filter out pages that other pages link to.
+ return 1 if IkiWiki::backlink_pages($page);
+
+ # Toplevel index is assumed to never be orphaned.
+ return 1 if $page eq 'index';
+
+ # If the page has a link to some other page, it's
+ # indirectly linked via that page's backlinks.
+ return 1 if grep {
+ length $_ &&
+ ($_ !~ /\/\Q$config{discussionpage}\E$/i || ! $config{discussion}) &&
+ bestlink($page, $_) !~ /^(\Q$page\E|)$/
+ } @{$links{$page}};
+
+ return 0;
+ },
+ );
return gettext("All pages have other pages linking to them.") unless @orphans;
return "<ul>\n".
diff --git a/IkiWiki/Plugin/otl.pm b/IkiWiki/Plugin/otl.pm
index c68fcbbe3..3801a6ec2 100644
--- a/IkiWiki/Plugin/otl.pm
+++ b/IkiWiki/Plugin/otl.pm
@@ -9,9 +9,7 @@ use open qw{:utf8 :std};
sub import {
hook(type => "getsetup", id => "otl", call => \&getsetup);
- hook(type => "filter", id => "otl", call => \&filter);
hook(type => "htmlize", id => "otl", call => \&htmlize);
-
}
sub getsetup () {
@@ -19,25 +17,20 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
}
-sub filter (@) {
+sub htmlize (@) {
my %params=@_;
-
- # Munge up check boxes to look a little bit better. This is a hack.
+
+ # Munge up check boxes to look a little bit better.
my $checked=htmllink($params{page}, $params{page},
"smileys/star_on.png", linktext => "[X]");
my $unchecked=htmllink($params{page}, $params{page},
"smileys/star_off.png", linktext => "[_]");
$params{content}=~s/^(\s*)\[X\]\s/${1}$checked /mg;
$params{content}=~s/^(\s*)\[_\]\s/${1}$unchecked /mg;
-
- return $params{content};
-}
-
-sub htmlize (@) {
- my %params=@_;
# Can't use open2 since otl2html doesn't play nice with buffering.
# Instead, fork off a child process that will run otl2html and feed
diff --git a/IkiWiki/Plugin/pagecount.pm b/IkiWiki/Plugin/pagecount.pm
index 5a2301af4..dd5de3c83 100644
--- a/IkiWiki/Plugin/pagecount.pm
+++ b/IkiWiki/Plugin/pagecount.pm
@@ -15,25 +15,26 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
sub preprocess (@) {
my %params=@_;
- $params{pages}="*" unless defined $params{pages};
+ my $pages=defined $params{pages} ? $params{pages} : "*";
- # Needs to update count whenever a page is added or removed, so
- # register a dependency.
- add_depends($params{page}, $params{pages});
-
- my @pages;
- if ($params{pages} eq "*") {
- @pages=keys %pagesources;
- }
- else {
- @pages=pagespec_match_list([keys %pagesources], $params{pages}, location => $params{page});
+ # Just get a list of all the pages, and count the items in it.
+ # Use a presence dependency to only update when pages are added
+ # or removed.
+
+ if ($pages eq '*') {
+ # optimisation to avoid needing to try matching every page
+ add_depends($params{page}, $pages, deptype("presence"));
+ return scalar keys %pagesources;
}
- return $#pages+1;
+
+ return scalar pagespec_match_list($params{page}, $pages,
+ deptype => deptype("presence"));
}
1
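
As the updated pagecount shows, the same call also serves for plain counting: forced into scalar context it yields the number of matching pages, and a presence dependency keeps the directive from triggering rebuilds on every content change. A one-line sketch with an illustrative pagespec:

    my $posts = scalar pagespec_match_list($params{page}, "blog/* and !*/Discussion",
            deptype => deptype("presence"));
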
diff --git a/IkiWiki/Plugin/pagestats.pm b/IkiWiki/Plugin/pagestats.pm
index 874ead7e6..17b26f7ba 100644
--- a/IkiWiki/Plugin/pagestats.pm
+++ b/IkiWiki/Plugin/pagestats.pm
@@ -27,6 +27,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -35,30 +36,46 @@ sub preprocess (@) {
$params{pages}="*" unless defined $params{pages};
my $style = ($params{style} or 'cloud');
- # Needs to update whenever a page is added or removed, so
- # register a dependency.
- add_depends($params{page}, $params{pages});
- add_depends($params{page}, $params{among}) if exists $params{among};
-
my %counts;
my $max = 0;
- foreach my $page (pagespec_match_list([keys %links],
- $params{pages}, location => $params{page})) {
+ foreach my $page (pagespec_match_list($params{page}, $params{pages},
+ # update when a displayed page is added/removed
+ deptype => deptype("presence"))) {
use IkiWiki::Render;
my @backlinks = IkiWiki::backlink_pages($page);
if (exists $params{among}) {
- @backlinks = pagespec_match_list(\@backlinks,
- $params{among}, location => $params{page});
+ # only consider backlinks from the "among" pages
+ @backlinks = pagespec_match_list(
+ $params{page}, $params{among},
+ # update whenever links on those pages change
+ deptype => deptype("links"),
+ list => \@backlinks
+ );
+ }
+ else {
+ # update when any page with links changes,
+ # in case the links point to our displayed pages
+ add_depends($params{page}, "*", deptype("links"));
}
$counts{$page} = scalar(@backlinks);
$max = $counts{$page} if $counts{$page} > $max;
}
+ if (exists $params{show}) {
+ my $i=0;
+ my %show;
+ foreach my $key (sort { $counts{$b} <=> $counts{$a} } keys %counts) {
+ last if ++$i > $params{show};
+ $show{$key}=$counts{$key};
+ }
+ %counts=%show;
+ }
+
if ($style eq 'table') {
- return "<table class='pageStats'>\n".
+ return "<table class='".(exists $params{class} ? $params{class} : "pageStats")."'>\n".
join("\n", map {
"<tr><td>".
htmllink($params{page}, $params{destpage}, $_, noimageinline => 1).
@@ -70,16 +87,31 @@ sub preprocess (@) {
else {
# In case of misspelling, default to a page cloud
- my $res = "<div class='pagecloud'>\n";
+ my $res;
+ if ($style eq 'list') {
+ $res = "<ul class='".(exists $params{class} ? $params{class} : "list")."'>\n";
+ }
+ else {
+ $res = "<div class='".(exists $params{class} ? $params{class} : "pagecloud")."'>\n";
+ }
foreach my $page (sort keys %counts) {
next unless $counts{$page} > 0;
my $class = $classes[$counts{$page} * scalar(@classes) / ($max + 1)];
+
+ $res.="<li>" if $style eq 'list';
$res .= "<span class=\"$class\">".
htmllink($params{page}, $params{destpage}, $page).
"</span>\n";
+ $res.="</li>" if $style eq 'list';
+
+ }
+ if ($style eq 'list') {
+ $res .= "</ul>\n";
+ }
+ else {
+ $res .= "</div>\n";
}
- $res .= "</div>\n";
return $res;
}
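
Beyond the dependency changes, this hunk gives the pagestats directive three optional knobs: show (keep only the N most linked-to pages), class (override the default CSS class) and a list style alongside cloud and table. Sketched as the parameter hash preprocess() would receive for a hypothetical call:

    my %params = (
            page  => 'index',
            pages => 'tags/*',          # pages whose backlinks are counted
            among => 'blog/posts/*',    # only count links made from these pages
            show  => 10,                # keep the ten most-linked pages
            style => 'list',            # 'cloud' (default), 'table' or 'list'
            class => 'taglist',         # css class used instead of the default
    );
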
diff --git a/IkiWiki/Plugin/parentlinks.pm b/IkiWiki/Plugin/parentlinks.pm
index 1ee69cbff..bbd2c5752 100644
--- a/IkiWiki/Plugin/parentlinks.pm
+++ b/IkiWiki/Plugin/parentlinks.pm
@@ -9,6 +9,7 @@ use IkiWiki 3.00;
sub import {
hook(type => "parentlinks", id => "parentlinks", call => \&parentlinks);
hook(type => "pagetemplate", id => "parentlinks", call => \&pagetemplate);
+ hook(type => "getsetup", id => "parentlinks", call => \&getsetup);
}
sub getsetup () {
@@ -16,12 +17,21 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1,
+ section => "core",
},
}
sub parentlinks ($) {
my $page=shift;
+ if (! length $page) {
+ # dynamic page
+ return {
+ url => $config{url},
+ page => $config{wikiname},
+ };
+ }
+
my @ret;
my $path="";
my $title=$config{wikiname};
@@ -52,11 +62,13 @@ sub parentlinks ($) {
sub pagetemplate (@) {
my %params=@_;
- my $page=$params{page};
my $template=$params{template};
- if ($template->query(name => "parentlinks")) {
- $template->param(parentlinks => [parentlinks($page)]);
+ if ($template->query(name => "parentlinks") ||
+ $template->query(name => "has_parentlinks")) {
+ my @links=parentlinks($params{page});
+ $template->param(parentlinks => \@links);
+ $template->param(has_parentlinks => (@links > 0));
}
}
diff --git a/IkiWiki/Plugin/passwordauth.pm b/IkiWiki/Plugin/passwordauth.pm
index 8cf5af51e..35ebd961f 100644
--- a/IkiWiki/Plugin/passwordauth.pm
+++ b/IkiWiki/Plugin/passwordauth.pm
@@ -19,6 +19,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
account_creation_password => {
type => "string",
@@ -104,11 +105,13 @@ sub formbuilder_setup (@) {
my $session=$params{session};
my $cgi=$params{cgi};
- if ($form->title eq "signin" || $form->title eq "register") {
+ my $do_register=defined $cgi->param("do") && $cgi->param("do") eq "register";
+
+ if ($form->title eq "signin" || $form->title eq "register" || $do_register) {
$form->field(name => "name", required => 0);
$form->field(name => "password", type => "password", required => 0);
- if ($form->submitted eq "Register" || $form->submitted eq "Create Account") {
+ if ($form->submitted eq "Register" || $form->submitted eq "Create Account" || $do_register) {
$form->field(name => "confirm_password", type => "password");
$form->field(name => "account_creation_password", type => "password")
if (defined $config{account_creation_password} &&
@@ -207,19 +210,34 @@ sub formbuilder_setup (@) {
}
}
elsif ($form->title eq "preferences") {
- $form->field(name => "name", disabled => 1,
- value => $session->param("name"), force => 1,
- fieldset => "login");
- $form->field(name => "password", type => "password",
- fieldset => "login",
- validate => sub {
- shift eq $form->field("confirm_password");
- }),
- $form->field(name => "confirm_password", type => "password",
- fieldset => "login",
- validate => sub {
- shift eq $form->field("password");
- }),
+ my $user=$session->param("name");
+ if (! IkiWiki::openiduser($user)) {
+ $form->field(name => "name", disabled => 1,
+ value => $user, force => 1,
+ fieldset => "login");
+ $form->field(name => "password", type => "password",
+ fieldset => "login",
+ validate => sub {
+ shift eq $form->field("confirm_password");
+ });
+ $form->field(name => "confirm_password", type => "password",
+ fieldset => "login",
+ validate => sub {
+ shift eq $form->field("password");
+ });
+
+ my $userpage=IkiWiki::userpage($user);
+ if (exists $pagesources{$userpage}) {
+ $form->text(gettext("Your user page: ").
+ htmllink("", "", $userpage,
+ noimageinline => 1));
+ }
+ else {
+ $form->text("<a href=\"".
+ IkiWiki::cgiurl(do => "edit", page => $userpage).
+ "\">".gettext("Create your user page")."</a>");
+ }
+ }
}
}
@@ -231,8 +249,10 @@ sub formbuilder (@) {
my $cgi=$params{cgi};
my $buttons=$params{buttons};
+ my $do_register=defined $cgi->param("do") && $cgi->param("do") eq "register";
+
if ($form->title eq "signin" || $form->title eq "register") {
- if ($form->submitted && $form->validate) {
+ if (($form->submitted && $form->validate) || $do_register) {
if ($form->submitted eq 'Login') {
$session->param("name", $form->field("name"));
IkiWiki::cgi_postsignin($cgi, $session);
@@ -277,7 +297,7 @@ sub formbuilder (@) {
),
wikiurl => $config{url},
wikiname => $config{wikiname},
- REMOTE_ADDR => $ENV{REMOTE_ADDR},
+ remote_addr => $session->remote_addr(),
);
eval q{use Mail::Sendmail};
@@ -295,7 +315,7 @@ sub formbuilder (@) {
$form->field(name => "name", required => 0);
push @$buttons, "Reset Password";
}
- elsif ($form->submitted eq "Register") {
+ elsif ($form->submitted eq "Register" || $do_register) {
@$buttons="Create Account";
}
}
@@ -336,6 +356,14 @@ sub sessioncgi ($$) {
IkiWiki::cgi_prefs($q, $session);
exit;
}
+ elsif ($q->param("do") eq "register") {
+ # After registration, need to go somewhere, so show prefs page.
+ $session->param(postsignin => "do=prefs");
+ # Due to do=register, this will run in registration-only
+ # mode.
+ IkiWiki::cgi_signin($q, $session);
+ exit;
+ }
}
sub auth ($$) {
diff --git a/IkiWiki/Plugin/po.pm b/IkiWiki/Plugin/po.pm
index 0ae4adcfc..6395ebdc2 100644
--- a/IkiWiki/Plugin/po.pm
+++ b/IkiWiki/Plugin/po.pm
@@ -10,7 +10,12 @@ use warnings;
use strict;
use IkiWiki 3.00;
use Encode;
-use Locale::Po4a::Common qw(nowrapi18n !/.*/);
+eval q{use Locale::Po4a::Common qw(nowrapi18n !/.*/)};
+if ($@) {
+ print STDERR gettext("warning: Old po4a detected! Recommend upgrade to 0.35.")."\n";
+ eval q{use Locale::Po4a::Common qw(!/.*/)};
+ die $@ if $@;
+}
use Locale::Po4a::Chooser;
use Locale::Po4a::Po;
use File::Basename;
@@ -23,6 +28,7 @@ use UNIVERSAL;
my %translations;
my @origneedsbuild;
my %origsubs;
+my @slavelanguages; # language codes ordered as in config po_slave_languages
memoize("istranslatable");
memoize("_istranslation");
@@ -46,16 +52,22 @@ sub import {
hook(type => "formbuilder_setup", id => "po", call => \&formbuilder_setup, last => 1);
hook(type => "formbuilder", id => "po", call => \&formbuilder);
- $origsubs{'bestlink'}=\&IkiWiki::bestlink;
- inject(name => "IkiWiki::bestlink", call => \&mybestlink);
- $origsubs{'beautify_urlpath'}=\&IkiWiki::beautify_urlpath;
- inject(name => "IkiWiki::beautify_urlpath", call => \&mybeautify_urlpath);
- $origsubs{'targetpage'}=\&IkiWiki::targetpage;
- inject(name => "IkiWiki::targetpage", call => \&mytargetpage);
- $origsubs{'urlto'}=\&IkiWiki::urlto;
- inject(name => "IkiWiki::urlto", call => \&myurlto);
- $origsubs{'cgiurl'}=\&IkiWiki::cgiurl;
- inject(name => "IkiWiki::cgiurl", call => \&mycgiurl);
+ if (! %origsubs) {
+ $origsubs{'bestlink'}=\&IkiWiki::bestlink;
+ inject(name => "IkiWiki::bestlink", call => \&mybestlink);
+ $origsubs{'beautify_urlpath'}=\&IkiWiki::beautify_urlpath;
+ inject(name => "IkiWiki::beautify_urlpath", call => \&mybeautify_urlpath);
+ $origsubs{'targetpage'}=\&IkiWiki::targetpage;
+ inject(name => "IkiWiki::targetpage", call => \&mytargetpage);
+ $origsubs{'urlto'}=\&IkiWiki::urlto;
+ inject(name => "IkiWiki::urlto", call => \&myurlto);
+ $origsubs{'cgiurl'}=\&IkiWiki::cgiurl;
+ inject(name => "IkiWiki::cgiurl", call => \&mycgiurl);
+ $origsubs{'rootpage'}=\&IkiWiki::rootpage;
+ inject(name => "IkiWiki::rootpage", call => \&myrootpage);
+ $origsubs{'isselflink'}=\&IkiWiki::isselflink;
+ inject(name => "IkiWiki::isselflink", call => \&myisselflink);
+ }
}
@@ -78,7 +90,8 @@ sub getsetup () {
return
plugin => {
safe => 0,
- rebuild => 1,
+ rebuild => 1, # format plugin
+ section => "format",
},
po_master_language => {
type => "string",
@@ -92,11 +105,11 @@ sub getsetup () {
},
po_slave_languages => {
type => "string",
- example => {
- 'fr' => 'Français',
- 'es' => 'Castellano',
- 'de' => 'Deutsch'
- },
+ example => [
+ 'fr|Français',
+ 'es|Español',
+ 'de|Deutsch'
+ ],
description => "slave languages (PO files)",
safe => 1,
rebuild => 1,
@@ -126,10 +139,32 @@ sub checkconfig () {
}
}
+ if (ref $config{po_slave_languages} eq 'ARRAY') {
+ my %slaves;
+ foreach my $pair (@{$config{po_slave_languages}}) {
+ my ($code, $name) = ( $pair =~ /^([a-z]{2})\|(.+)$/ );
+ if (!defined $code || !defined $name) {
+ error(sprintf(gettext("%s has invalid syntax: must use CODE|NAME"),
+ $pair));
+ }
+ $slaves{$code} = $name;
+ push @slavelanguages, $code;
+
+ }
+ $config{po_slave_languages} = \%slaves;
+ }
+ elsif (ref $config{po_slave_languages} eq 'HASH') {
+ @slavelanguages = sort {
+ $config{po_slave_languages}->{$a} cmp $config{po_slave_languages}->{$b};
+ } keys %{$config{po_slave_languages}};
+ }
+
+ delete $config{po_slave_languages}{$config{po_master_language}{code}};
+
map {
islanguagecode($_)
or error(sprintf(gettext("%s is not a valid language code"), $_));
- } ($config{po_master_language}{code}, keys %{$config{po_slave_languages}});
+ } ($config{po_master_language}{code}, @slavelanguages);
if (! exists $config{po_translatable_pages} ||
! defined $config{po_translatable_pages}) {
@@ -158,7 +193,7 @@ sub checkconfig () {
next if $underlay=~/^locale\//;
# Underlays containing the po files for slave languages.
- foreach my $ll (keys %{$config{po_slave_languages}}) {
+ foreach my $ll (@slavelanguages) {
add_underlay("po/$ll/$underlay")
if -d "$config{underlaydirbase}/po/$ll/$underlay";
}
@@ -166,7 +201,8 @@ sub checkconfig () {
if ($config{po_master_language}{code} ne 'en') {
# Add underlay containing translated source files
# for the master language.
- add_underlay("locale/$config{po_master_language}{code}/$underlay");
+ add_underlay("locale/$config{po_master_language}{code}/$underlay")
+ if -d "$config{underlaydirbase}/locale/$config{po_master_language}{code}/$underlay";
}
}
}
@@ -183,7 +219,7 @@ sub needsbuild () {
# make existing translations depend on the corresponding master page
foreach my $master (keys %translations) {
- map add_depends($_, $master), values %{otherlanguages($master)};
+ map add_depends($_, $master), values %{otherlanguages_pages($master)};
}
}
@@ -199,10 +235,7 @@ sub scan (@) {
if (istranslation($page)) {
foreach my $destpage (@{$links{$page}}) {
if (istranslatable($destpage)) {
- # replace one occurence of $destpage in $links{$page}
- # (we only want to replace the one that was added by
- # IkiWiki::Plugin::link::scan, other occurences may be
- # there for other reasons)
+ # replace the occurrence of $destpage in $links{$page}
for (my $i=0; $i<@{$links{$page}}; $i++) {
if (@{$links{$page}}[$i] eq $destpage) {
@{$links{$page}}[$i] = $destpage . '.' . lang($page);
@@ -218,7 +251,7 @@ sub scan (@) {
# make sure any destpage's translations has
# $page in its backlinks
push @{$links{$page}},
- values %{otherlanguages($destpage)};
+ values %{otherlanguages_pages($destpage)};
}
}
}
@@ -276,20 +309,19 @@ sub pagetemplate (@) {
}
if ($template->query(name => "otherlanguages")) {
$template->param(otherlanguages => [otherlanguagesloop($page)]);
- map add_depends($page, $_), (values %{otherlanguages($page)});
+ map add_depends($page, $_), (values %{otherlanguages_pages($page)});
}
if ($config{discussion} && istranslation($page)) {
- my $discussionlink=gettext("discussion");
- if ($page !~ /.*\/\Q$discussionlink\E$/i &&
+ if ($page !~ /.*\/\Q$config{discussionpage}\E$/i &&
(length $config{cgiurl} ||
- exists $links{$masterpage."/".$discussionlink})) {
+ exists $links{$masterpage."/".lc($config{discussionpage})})) {
$template->param('discussionlink' => htmllink(
$page,
$destpage,
- $masterpage . '/' . gettext("Discussion"),
+ $masterpage . '/' . $config{discussionpage},
noimageinline => 1,
forcesubpage => 0,
- linktext => gettext("Discussion"),
+ linktext => $config{discussionpage},
));
}
}
@@ -301,7 +333,10 @@ sub pagetemplate (@) {
&& $masterpage eq "index") {
$template->param('parentlinks' => []);
}
-} # }}}
+ if (ishomepage($page) && $template->query(name => "title")) {
+ $template->param(title => $config{wikiname});
+ }
+}
# Add the renamed page translations to the list of to-be-renamed pages.
sub renamepages (@) {
@@ -327,12 +362,12 @@ sub renamepages (@) {
return () unless istranslatable($torename{src});
my @ret;
- my %otherpages=%{otherlanguages($torename{src})};
+ my %otherpages=%{otherlanguages_pages($torename{src})};
while (my ($lang, $otherpage) = each %otherpages) {
push @ret, {
src => $otherpage,
srcfile => $pagesources{$otherpage},
- dest => otherlanguage($torename{dest}, $lang),
+ dest => otherlanguage_page($torename{dest}, $lang),
destfile => $torename{dest}.".".$lang.".po",
required => 0,
};
@@ -380,45 +415,45 @@ sub change (@) {
resetalreadyfiltered();
require IkiWiki::Render;
foreach my $file (@rendered) {
- debug(sprintf(gettext("building %s"), $file));
- IkiWiki::render($file);
+ IkiWiki::render($file, sprintf(gettext("building %s"), $file));
}
}
my $updated_po_files=0;
# Refresh/create POT and PO files as needed.
- # (But avoid doing so if they are in an underlay directory.)
foreach my $file (grep {istranslatablefile($_)} @rendered) {
my $masterfile=srcfile($file);
my $page=pagename($file);
my $updated_pot_file=0;
+
+ # Avoid touching underlay files.
+ next if $masterfile ne "$config{srcdir}/$file";
+
# Only refresh POT file if it does not exist, or if
- # $pagesources{$page} was changed: don't if only the HTML was
+ # the source was changed: don't if only the HTML was
# refreshed, e.g. because of a dependency.
- if ($masterfile eq "$config{srcdir}/$file" &&
- ((grep { $_ eq $pagesources{$page} } @origneedsbuild)
- || ! -e potfile($masterfile))) {
+ if ((grep { $_ eq $pagesources{$page} } @origneedsbuild) ||
+ ! -e potfile($masterfile)) {
refreshpot($masterfile);
$updated_pot_file=1;
}
my @pofiles;
foreach my $po (pofiles($masterfile)) {
- next if ! $updated_pot_file && ! -e $po;
+ next if ! $updated_pot_file && -e $po;
next if grep { $po=~/\Q$_\E/ } @{$config{underlaydirs}};
push @pofiles, $po;
}
if (@pofiles) {
refreshpofiles($masterfile, @pofiles);
- map { IkiWiki::rcs_add($_) } @pofiles if $config{rcs};
+ map { s/^\Q$config{srcdir}\E\/*//; IkiWiki::rcs_add($_) } @pofiles if $config{rcs};
$updated_po_files=1;
}
}
if ($updated_po_files) {
commit_and_refresh(
- gettext("updated PO files"),
- "IkiWiki::Plugin::po::change");
+ gettext("updated PO files"));
}
}
@@ -523,12 +558,25 @@ sub formbuilder (@) {
# This cannot be done in the formbuilder_setup hook as the list of types is
# computed later.
if ($form->field("do") eq "create") {
- foreach my $field ($form->field) {
+ foreach my $field ($form->field) {
next unless "$field" eq "type";
- if ($field->type eq 'select') {
- # remove po from the list of types
- my @types = grep { $_ ne 'po' } $field->options;
- $field->options(\@types) if @types;
+ next unless $field->type eq 'select';
+ my $orig_value = $field->value;
+ # remove po from the list of types
+ my @types = grep { $_->[0] ne 'po' } $field->options;
+ $field->options(\@types) if @types;
+ # favor the type of linking page's masterpage
+ if ($orig_value eq 'po') {
+ my ($from, $type);
+ if (defined $form->field('from')) {
+ ($from)=$form->field('from')=~/$config{wiki_file_regexp}/;
+ $from = masterpage($from);
+ }
+ if (defined $from && exists $pagesources{$from}) {
+ $type=pagetype($pagesources{$from});
+ }
+ $type=$config{default_pageext} unless defined $type;
+ $field->value($type);
}
}
}
@@ -543,11 +591,16 @@ sub mybestlink ($$) {
my $page=shift;
my $link=shift;
+ return $origsubs{'bestlink'}->($page, $link)
+ if defined $config{po_link_to} && $config{po_link_to} eq "default";
+
my $res=$origsubs{'bestlink'}->(masterpage($page), $link);
+ my @caller = caller(1);
if (length $res
- && ($config{po_link_to} eq "current" || $config{po_link_to} eq "negotiated")
&& istranslatable($res)
- && istranslation($page)) {
+ && istranslation($page)
+ && !(exists $caller[3] && defined $caller[3]
+ && ($caller[3] eq "IkiWiki::PageSpec::match_link"))) {
return $res . "." . lang($page);
}
return $res;
@@ -557,12 +610,12 @@ sub mybeautify_urlpath ($) {
my $url=shift;
my $res=$origsubs{'beautify_urlpath'}->($url);
- if ($config{po_link_to} eq "negotiated") {
+ if (defined $config{po_link_to} && $config{po_link_to} eq "negotiated") {
$res =~ s!/\Qindex.$config{po_master_language}{code}.$config{htmlext}\E$!/!;
$res =~ s!/\Qindex.$config{htmlext}\E$!/!;
map {
$res =~ s!/\Qindex.$_.$config{htmlext}\E$!/!;
- } (keys %{$config{po_slave_languages}});
+ } @slavelanguages;
}
return $res;
}
@@ -598,17 +651,21 @@ sub myurlto ($$;$) {
# so that one is redirected to the just-edited page rather than to the
# negotiated translation; to prevent unnecessary fiddling with caller/inject,
# we only do so when our beautify_urlpath would actually do what we want to
- # avoid, i.e. when po_link_to = negotiated
+ # avoid, i.e. when po_link_to = negotiated.
+ # also avoid doing so when run by cgi_goto, so that the links on recentchanges
+ # page actually lead to the exact page they pretend to.
if ($config{po_link_to} eq "negotiated") {
my @caller = caller(1);
- my $run_by_editpage = 0;
- $run_by_editpage = 1 if (exists $caller[3] && defined $caller[3]
- && $caller[3] eq "IkiWiki::cgi_editpage");
+ my $use_orig = 0;
+ $use_orig = 1 if (exists $caller[3] && defined $caller[3]
+ && ($caller[3] eq "IkiWiki::cgi_editpage" ||
+ $caller[3] eq "IkiWiki::Plugin::goto::cgi_goto")
+ );
inject(name => "IkiWiki::beautify_urlpath", call => $origsubs{'beautify_urlpath'})
- if $run_by_editpage;
+ if $use_orig;
my $res = $origsubs{'urlto'}->($to,$from,$absolute);
inject(name => "IkiWiki::beautify_urlpath", call => \&mybeautify_urlpath)
- if $run_by_editpage;
+ if $use_orig;
return $res;
}
else {
@@ -626,6 +683,33 @@ sub mycgiurl (@) {
return $origsubs{'cgiurl'}->(%params);
}
+sub myrootpage (@) {
+ my %params=@_;
+
+ my $rootpage;
+ if (exists $params{rootpage}) {
+ $rootpage=$origsubs{'bestlink'}->($params{page}, $params{rootpage});
+ if (!length $rootpage) {
+ $rootpage=$params{rootpage};
+ }
+ }
+ else {
+ $rootpage=masterpage($params{page});
+ }
+ return $rootpage;
+}
+
+sub myisselflink ($$) {
+ my $page=shift;
+ my $link=shift;
+
+ return 1 if $origsubs{'isselflink'}->($page, $link);
+ if (istranslation($page)) {
+ return $origsubs{'isselflink'}->(masterpage($page), $link);
+ }
+ return;
+}
+
# ,----
# | Blackboxes for private data
# `----
@@ -681,6 +765,7 @@ sub istranslatablefile ($) {
my $type=pagetype($file);
return 0 if ! defined $type || $type eq 'po';
return 0 if $file =~ /\.pot$/;
+ return 0 if ! defined $config{po_translatable_pages};
return 1 if pagespec_match(pagename($file), $config{po_translatable_pages});
return;
}
@@ -749,7 +834,7 @@ sub islanguagecode ($) {
return $code =~ /^[a-z]{2}$/;
}
-sub otherlanguage ($$) {
+sub otherlanguage_page ($$) {
my $page=shift;
my $code=shift;
@@ -757,17 +842,31 @@ sub otherlanguage ($$) {
return masterpage($page) . '.' . $code;
}
-sub otherlanguages ($) {
+# Returns the list of other languages codes: the master language comes first,
+# then the codes are ordered the same way as in po_slave_languages, if it is
+# an array, or in the language name lexical order, if it is a hash.
+sub otherlanguages_codes ($) {
my $page=shift;
- my %ret;
- return \%ret unless istranslation($page) || istranslatable($page);
+ my @ret;
+ return \@ret unless istranslation($page) || istranslatable($page);
my $curlang=lang($page);
foreach my $lang
- ($config{po_master_language}{code}, keys %{$config{po_slave_languages}}) {
+ ($config{po_master_language}{code}, @slavelanguages) {
next if $lang eq $curlang;
- $ret{$lang}=otherlanguage($page, $lang);
+ push @ret, $lang;
}
+ return \@ret;
+}
+
+sub otherlanguages_pages ($) {
+ my $page=shift;
+
+ my %ret;
+ map {
+ $ret{$_} = otherlanguage_page($page, $_)
+ } @{otherlanguages_codes($page)};
+
return \%ret;
}
@@ -791,25 +890,25 @@ sub pofile ($$) {
sub pofiles ($) {
my $masterfile=shift;
- return map pofile($masterfile, $_), (keys %{$config{po_slave_languages}});
+ return map pofile($masterfile, $_), @slavelanguages;
}
sub refreshpot ($) {
my $masterfile=shift;
my $potfile=potfile($masterfile);
- my %options = ("markdown" => (pagetype($masterfile) eq 'mdwn') ? 1 : 0);
- my $doc=Locale::Po4a::Chooser::new('text',%options);
+ my $doc=Locale::Po4a::Chooser::new(po4a_type($masterfile),
+ po4a_options($masterfile));
$doc->{TT}{utf_mode} = 1;
- $doc->{TT}{file_in_charset} = 'utf-8';
- $doc->{TT}{file_out_charset} = 'utf-8';
+ $doc->{TT}{file_in_charset} = 'UTF-8';
+ $doc->{TT}{file_out_charset} = 'UTF-8';
$doc->read($masterfile);
# let's cheat a bit to force porefs option to be passed to
# Locale::Po4a::Po; this is undocumented use of internal
# Locale::Po4a::TransTractor's data, compulsory since this module
# prevents us from using the porefs option.
$doc->{TT}{po_out}=Locale::Po4a::Po->new({ 'porefs' => 'none' });
- $doc->{TT}{po_out}->set_charset('utf-8');
+ $doc->{TT}{po_out}->set_charset('UTF-8');
# do the actual work
$doc->parse;
IkiWiki::prep_writefile(basename($potfile),dirname($potfile));
@@ -827,6 +926,21 @@ sub refreshpofiles ($@) {
foreach my $pofile (@pofiles) {
IkiWiki::prep_writefile(basename($pofile),dirname($pofile));
+
+ if (! -e $pofile) {
+ # If the po file exists in an underlay, copy it
+ # from there.
+ my ($pobase)=$pofile=~/^\Q$config{srcdir}\E\/?(.*)$/;
+ foreach my $dir (@{$config{underlaydirs}}) {
+ if (-e "$dir/$pobase") {
+ File::Copy::syscopy("$dir/$pobase",$pofile)
+ or error("po(refreshpofiles) ".
+ sprintf(gettext("failed to copy underlay PO file to %s"),
+ $pofile));
+ }
+ }
+ }
+
if (-e $pofile) {
system("msgmerge", "--previous", "-q", "-U", "--backup=none", $pofile, $potfile) == 0
or error("po(refreshpofiles) ".
@@ -875,15 +989,13 @@ sub percenttranslated ($) {
return gettext("N/A") unless istranslation($page);
my $file=srcfile($pagesources{$page});
my $masterfile = srcfile($pagesources{masterpage($page)});
- my %options = (
- "markdown" => (pagetype($masterfile) eq 'mdwn') ? 1 : 0,
- );
- my $doc=Locale::Po4a::Chooser::new('text',%options);
+ my $doc=Locale::Po4a::Chooser::new(po4a_type($masterfile),
+ po4a_options($masterfile));
$doc->process(
'po_in_name' => [ $file ],
'file_in_name' => [ $masterfile ],
- 'file_in_charset' => 'utf-8',
- 'file_out_charset' => 'utf-8',
+ 'file_in_charset' => 'UTF-8',
+ 'file_out_charset' => 'UTF-8',
) or error("po(percenttranslated) ".
sprintf(gettext("failed to translate %s"), $page));
my ($percent,$hit,$queries) = $doc->stats();
@@ -905,30 +1017,25 @@ sub otherlanguagesloop ($) {
my $page=shift;
my @ret;
- my %otherpages=%{otherlanguages($page)};
- while (my ($lang, $otherpage) = each %otherpages) {
- if (istranslation($page) && masterpage($page) eq $otherpage) {
- push @ret, {
- url => urlto_with_orig_beautiful_urlpath($otherpage, $page),
- code => $lang,
- language => languagename($lang),
- master => 1,
- };
- }
- else {
- push @ret, {
- url => urlto_with_orig_beautiful_urlpath($otherpage, $page),
- code => $lang,
- language => languagename($lang),
- percent => percenttranslated($otherpage),
- }
+ if (istranslation($page)) {
+ push @ret, {
+ url => urlto_with_orig_beautiful_urlpath(masterpage($page), $page),
+ code => $config{po_master_language}{code},
+ language => $config{po_master_language}{name},
+ master => 1,
+ };
+ }
+ foreach my $lang (@{otherlanguages_codes($page)}) {
+ next if $lang eq $config{po_master_language}{code};
+ my $otherpage = otherlanguage_page($page, $lang);
+ push @ret, {
+ url => urlto_with_orig_beautiful_urlpath($otherpage, $page),
+ code => $lang,
+ language => languagename($lang),
+ percent => percenttranslated($otherpage),
}
}
- return sort {
- return -1 if $a->{code} eq $config{po_master_language}{code};
- return 1 if $b->{code} eq $config{po_master_language}{code};
- return $a->{language} cmp $b->{language};
- } @ret;
+ return @ret;
}
sub homepageurl (;$) {
@@ -937,6 +1044,14 @@ sub homepageurl (;$) {
return urlto('', $page);
}
+sub ishomepage ($) {
+ my $page = shift;
+
+ return 1 if $page eq 'index';
+ map { return 1 if $page eq 'index.'.$_ } @slavelanguages;
+ return undef;
+}
+
sub deletetranslations ($) {
my $deletedmasterfile=shift;
@@ -948,7 +1063,7 @@ sub deletetranslations ($) {
if (-e $absfile && ! -l $absfile && ! -d $absfile) {
push @todelete, $file;
}
- } keys %{$config{po_slave_languages}};
+ } @slavelanguages;
map {
if ($config{rcs}) {
@@ -961,17 +1076,18 @@ sub deletetranslations ($) {
if (@todelete) {
commit_and_refresh(
- gettext("removed obsolete PO files"),
- "IkiWiki::Plugin::po::deletetranslations");
+ gettext("removed obsolete PO files"));
}
}
-sub commit_and_refresh ($$) {
- my ($msg, $author) = (shift, shift);
+sub commit_and_refresh ($) {
+ my $msg = shift;
if ($config{rcs}) {
IkiWiki::disable_commit_hook();
- IkiWiki::rcs_commit_staged($msg, $author, "127.0.0.1");
+ IkiWiki::rcs_commit_staged(
+ message => $msg,
+ );
IkiWiki::enable_commit_hook();
IkiWiki::rcs_update();
}
@@ -989,11 +1105,8 @@ sub commit_and_refresh ($$) {
IkiWiki::saveindex();
}
-# on success, returns the filtered content.
-# on error, if $nonfatal, warn and return undef; else, error out.
-sub po_to_markup ($$;$) {
+sub po_to_markup ($$) {
my ($page, $content) = (shift, shift);
- my $nonfatal = shift;
$content = '' unless defined $content;
$content = decode_utf8(encode_utf8($content));
@@ -1016,10 +1129,6 @@ sub po_to_markup ($$;$) {
my $fail = sub ($) {
my $msg = "po(po_to_markup) - $page : " . shift;
- if ($nonfatal) {
- warn $msg;
- return undef;
- }
error($msg, sub { unlink $infile, $outfile});
};
@@ -1027,21 +1136,18 @@ sub po_to_markup ($$;$) {
or return $fail->(sprintf(gettext("failed to write %s"), $infile));
my $masterfile = srcfile($pagesources{masterpage($page)});
- my %options = (
- "markdown" => (pagetype($masterfile) eq 'mdwn') ? 1 : 0,
- );
- my $doc=Locale::Po4a::Chooser::new('text',%options);
+ my $doc=Locale::Po4a::Chooser::new(po4a_type($masterfile),
+ po4a_options($masterfile));
$doc->process(
'po_in_name' => [ $infile ],
'file_in_name' => [ $masterfile ],
- 'file_in_charset' => 'utf-8',
- 'file_out_charset' => 'utf-8',
+ 'file_in_charset' => 'UTF-8',
+ 'file_out_charset' => 'UTF-8',
) or return $fail->(gettext("failed to translate"));
$doc->write($outfile)
or return $fail->(sprintf(gettext("failed to write %s"), $outfile));
- $content = readfile($outfile)
- or return $fail->(sprintf(gettext("failed to read %s"), $outfile));
+ $content = readfile($outfile);
# Unlinking should happen automatically, thanks to File::Temp,
# but it does not work here, probably because of the way writefile()
@@ -1088,12 +1194,43 @@ sub isvalidpo ($) {
unlink $infile;
if ($res) {
- return IkiWiki::SuccessReason->new("valid gettext data");
+ return IkiWiki::SuccessReason->new("valid gettext data");
}
return IkiWiki::FailReason->new(gettext("invalid gettext data, go back ".
"to previous page to continue edit"));
}
+sub po4a_type ($) {
+ my $file = shift;
+
+ my $pagetype = pagetype($file);
+ if ($pagetype eq 'html') {
+ return 'xhtml';
+ }
+ return 'text';
+}
+
+sub po4a_options($) {
+ my $file = shift;
+
+ my %options;
+ my $pagetype = pagetype($file);
+
+ if ($pagetype eq 'html') {
+ # how to disable options is not consistent across po4a modules
+ $options{includessi} = '';
+ $options{includeexternal} = 0;
+ }
+ elsif ($pagetype eq 'mdwn') {
+ $options{markdown} = 1;
+ }
+ else {
+ $options{markdown} = 0;
+ }
+
+ return %options;
+}
+
# ,----
# | PageSpecs
# `----
@@ -1154,4 +1291,32 @@ sub match_currentlang ($$;@) {
}
}
+sub match_needstranslation ($$;@) {
+ my $page=shift;
+ my $wanted=shift;
+
+ if (defined $wanted && $wanted ne "") {
+ if ($wanted !~ /^\d+$/) {
+ return IkiWiki::FailReason->new("parameter is not an integer");
+ }
+ elsif ($wanted > 100) {
+ return IkiWiki::FailReason->new("parameter is greater than 100");
+ }
+ }
+ else {
+ $wanted=100;
+ }
+
+ my $percenttranslated=IkiWiki::Plugin::po::percenttranslated($page);
+ if ($percenttranslated eq 'N/A') {
+ return IkiWiki::FailReason->new("file is not a translatable page");
+ }
+ elsif ($percenttranslated < $wanted) {
+ return IkiWiki::SuccessReason->new("file has $percenttranslated translated");
+ }
+ else {
+ return IkiWiki::FailReason->new("file is translated enough");
+ }
+}
+
1
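
Two user-visible po changes are easy to miss in the diff: po_slave_languages may now be an ordered list of CODE|NAME strings (the old hash form still works), and the new needstranslation() pagespec matches pages whose translation is less than a given percentage complete (100 when called without an argument). A runnable sketch of the parsing checkconfig() applies, with illustrative values:

    use utf8;
    binmode(STDOUT, ':encoding(UTF-8)');

    my @po_slave_languages = ('fr|Français', 'es|Español', 'de|Deutsch');
    foreach my $pair (@po_slave_languages) {
            my ($code, $name) = ( $pair =~ /^([a-z]{2})\|(.+)$/ );
            die "$pair has invalid syntax: must use CODE|NAME\n"
                    unless defined $code && defined $name;
            print "$code => $name\n";    # e.g. "fr => Français"
    }
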
diff --git a/IkiWiki/Plugin/poll.pm b/IkiWiki/Plugin/poll.pm
index bc1e3501e..b333e2cdc 100644
--- a/IkiWiki/Plugin/poll.pm
+++ b/IkiWiki/Plugin/poll.pm
@@ -17,6 +17,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -133,9 +134,12 @@ sub sessioncgi ($$) {
$oldchoice=$session->param($choice_param);
if ($config{rcs}) {
IkiWiki::disable_commit_hook();
- IkiWiki::rcs_commit($pagesources{$page}, "poll vote ($choice)",
- IkiWiki::rcs_prepedit($pagesources{$page}),
- $session->param("name"), $ENV{REMOTE_ADDR});
+ IkiWiki::rcs_commit(
+ file => $pagesources{$page},
+ message => "poll vote ($choice)",
+ token => IkiWiki::rcs_prepedit($pagesources{$page}),
+ session => $session,
+ );
IkiWiki::enable_commit_hook();
IkiWiki::rcs_update();
}
diff --git a/IkiWiki/Plugin/polygen.pm b/IkiWiki/Plugin/polygen.pm
index bc21d71c7..78e3611e1 100644
--- a/IkiWiki/Plugin/polygen.pm
+++ b/IkiWiki/Plugin/polygen.pm
@@ -20,6 +20,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
diff --git a/IkiWiki/Plugin/postsparkline.pm b/IkiWiki/Plugin/postsparkline.pm
index d2e5c2378..2fae9c5fe 100644
--- a/IkiWiki/Plugin/postsparkline.pm
+++ b/IkiWiki/Plugin/postsparkline.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -30,11 +31,16 @@ sub preprocess (@) {
return "";
}
+ my $deptype;
if (! exists $params{time} || $params{time} ne 'mtime') {
$params{timehash} = \%IkiWiki::pagectime;
+ # need to update when pages are added or removed
+ $deptype = deptype("presence");
}
else {
$params{timehash} = \%IkiWiki::pagemtime;
+ # need to update when pages are changed
+ $deptype = deptype("content");
}
if (! exists $params{formula}) {
@@ -48,12 +54,11 @@ sub preprocess (@) {
error gettext("unknown formula");
}
- add_depends($params{page}, $params{pages});
-
my @list=sort { $params{timehash}->{$b} <=> $params{timehash}->{$a} }
- pagespec_match_list(
- [ grep { $_ ne $params{page} } keys %pagesources],
- $params{pages}, location => $params{page});
+ pagespec_match_list($params{page}, $params{pages},
+ deptype => $deptype,
+ filter => sub { $_[0] eq $params{page} },
+ );
my @data=eval qq{IkiWiki::Plugin::postsparkline::formula::$formula(\\\%params, \@list)};
if ($@) {
diff --git a/IkiWiki/Plugin/progress.pm b/IkiWiki/Plugin/progress.pm
index 76d994acc..d27df5ca8 100644
--- a/IkiWiki/Plugin/progress.pm
+++ b/IkiWiki/Plugin/progress.pm
@@ -18,6 +18,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -36,16 +37,12 @@ sub preprocess (@) {
$fill.="%";
}
elsif (defined $params{totalpages} and defined $params{donepages}) {
- add_depends($params{page}, $params{totalpages});
- add_depends($params{page}, $params{donepages});
-
- my @pages=keys %pagesources;
- my $totalcount=0;
- my $donecount=0;
- foreach my $page (@pages) {
- $totalcount++ if pagespec_match($page, $params{totalpages}, location => $params{page});
- $donecount++ if pagespec_match($page, $params{donepages}, location => $params{page});
- }
+ my $totalcount=pagespec_match_list(
+ $params{page}, $params{totalpages},
+ deptype => deptype("presence"));
+ my $donecount=pagespec_match_list(
+ $params{page}, $params{donepages},
+ deptype => deptype("presence"));
if ($totalcount == 0) {
$fill = "100%";
diff --git a/IkiWiki/Plugin/rawhtml.pm b/IkiWiki/Plugin/rawhtml.pm
index ad8a610c1..0838bcb22 100644
--- a/IkiWiki/Plugin/rawhtml.pm
+++ b/IkiWiki/Plugin/rawhtml.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # changes file types
+ section => "format",
},
}
diff --git a/IkiWiki/Plugin/recentchanges.pm b/IkiWiki/Plugin/recentchanges.pm
index fa851e466..758b98348 100644
--- a/IkiWiki/Plugin/recentchanges.pm
+++ b/IkiWiki/Plugin/recentchanges.pm
@@ -60,15 +60,15 @@ sub refresh ($) {
}
}
-# Enable the recentchanges link on wiki pages.
+# Enable the recentchanges link.
sub pagetemplate (@) {
my %params=@_;
my $template=$params{template};
my $page=$params{page};
if (defined $config{recentchangespage} && $config{rcs} &&
- $page ne $config{recentchangespage} &&
- $template->query(name => "recentchangesurl")) {
+ $template->query(name => "recentchangesurl") &&
+ $page ne $config{recentchangespage}) {
$template->param(recentchangesurl => urlto($config{recentchangespage}, $page));
$template->param(have_actions => 1);
}
@@ -114,17 +114,16 @@ sub store ($$$) {
];
push @{$change->{pages}}, { link => '...' } if $is_excess;
- # See if the committer is an openid.
$change->{author}=$change->{user};
my $oiduser=eval { IkiWiki::openiduser($change->{user}) };
if (defined $oiduser) {
$change->{authorurl}=$change->{user};
- $change->{user}=$oiduser;
+ $change->{user}=defined $change->{nickname} ? $change->{nickname} : $oiduser;
}
elsif (length $config{cgiurl}) {
$change->{authorurl} = IkiWiki::cgiurl(
do => "goto",
- page => (length $config{userdir} ? "$config{userdir}/" : "").$change->{author},
+ page => IkiWiki::userpage($change->{author}),
);
}
diff --git a/IkiWiki/Plugin/relativedate.pm b/IkiWiki/Plugin/relativedate.pm
index 3e33cd5c3..7296889ab 100644
--- a/IkiWiki/Plugin/relativedate.pm
+++ b/IkiWiki/Plugin/relativedate.pm
@@ -5,7 +5,7 @@ use warnings;
no warnings 'redefine';
use strict;
use IkiWiki 3.00;
-use POSIX;
+use POSIX ();
use Encode;
sub import {
@@ -26,8 +26,8 @@ sub getsetup () {
sub format (@) {
my %params=@_;
- if (! ($params{content}=~s!^(<body>)!$1.include_javascript($params{page})!em)) {
- # no </body> tag, probably in preview mode
+ if (! ($params{content}=~s!^(<body[^>]*>)!$1.include_javascript($params{page})!em)) {
+ # no <body> tag, probably in preview mode
$params{content}=include_javascript($params{page}, 1).$params{content};
}
return $params{content};
@@ -37,24 +37,36 @@ sub include_javascript ($;$) {
my $page=shift;
my $absolute=shift;
- return '<script src="'.urlto("ikiwiki.js", $page, $absolute).
+ return '<script src="'.urlto("ikiwiki/ikiwiki.js", $page, $absolute).
'" type="text/javascript" charset="utf-8"></script>'."\n".
- '<script src="'.urlto("relativedate.js", $page, $absolute).
+ '<script src="'.urlto("ikiwiki/relativedate.js", $page, $absolute).
'" type="text/javascript" charset="utf-8"></script>';
}
-sub mydisplaytime ($;$) {
+sub mydisplaytime ($;$$) {
my $time=shift;
my $format=shift;
+ my $pubdate=shift;
# This needs to be in a form that can be parsed by javascript.
- # Being fairly human readable is also nice, as it will be exposed
- # as the title if javascript is not available.
+ # (Being fairly human readable is also nice, as it will be exposed
+ # as the title if javascript is not available.)
+ my $lc_time=POSIX::setlocale(&POSIX::LC_TIME);
+ POSIX::setlocale(&POSIX::LC_TIME, "C");
my $gmtime=decode_utf8(POSIX::strftime("%a, %d %b %Y %H:%M:%S %z",
localtime($time)));
+ POSIX::setlocale(&POSIX::LC_TIME, $lc_time);
- return '<span class="relativedate" title="'.$gmtime.'">'.
- IkiWiki::formattime($time, $format).'</span>';
+ my $mid=' class="relativedate" title="'.$gmtime.'">'.
+ IkiWiki::formattime($time, $format);
+
+ if ($config{html5}) {
+ return '<time datetime="'.IkiWiki::date_3339($time).'"'.
+ ($pubdate ? ' pubdate="pubdate"' : '').$mid.'</time>';
+ }
+ else {
+ return '<span'.$mid.'</span>';
+ }
}
1
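
The timestamp written into the title attribute must stay parseable by the javascript regardless of the wiki's locale, which is why the hunk above pins LC_TIME to the C locale around strftime and then restores it (and, with html5 enabled, wraps the result in a <time> element instead of a span). The same locale trick in isolation, as a runnable sketch:

    use POSIX ();
    use Encode;

    my $saved = POSIX::setlocale(&POSIX::LC_TIME);
    POSIX::setlocale(&POSIX::LC_TIME, "C");
    my $stamp = decode_utf8(POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime(time)));
    POSIX::setlocale(&POSIX::LC_TIME, $saved);

    print "$stamp\n";   # e.g. "Fri, 27 Aug 2010 10:01:58 +0200"
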
diff --git a/IkiWiki/Plugin/remove.pm b/IkiWiki/Plugin/remove.pm
index cbc8a0f2c..95f148183 100644
--- a/IkiWiki/Plugin/remove.pm
+++ b/IkiWiki/Plugin/remove.pm
@@ -18,6 +18,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "web",
},
}
@@ -48,10 +49,10 @@ sub check_canremove ($$$) {
# This is sorta overkill, but better safe than sorry.
if (! defined pagetype($pagesources{$page})) {
if (IkiWiki::Plugin::attachment->can("check_canattach")) {
- IkiWiki::Plugin::attachment::check_canattach($session, $page, $file);
+ IkiWiki::Plugin::attachment::check_canattach($session, $page, "$config{srcdir}/$file");
}
else {
- error("renaming of attachments is not allowed");
+ error("removal of attachments is not allowed");
}
}
@@ -102,10 +103,12 @@ sub confirmation_form ($$) {
javascript => 0,
params => $q,
action => $config{cgiurl},
- stylesheet => IkiWiki::baseurl()."style.css",
+ stylesheet => 1,
fields => [qw{do page}],
);
+ $f->field(name => "sid", type => "hidden", value => $session->id,
+ force => 1);
$f->field(name => "do", type => "hidden", value => "remove", force => 1);
return $f, ["Remove", "Cancel"];
@@ -166,7 +169,7 @@ sub formbuilder (@) {
removal_confirm($q, $session, 0, $form->field("page"));
}
elsif ($form->submitted eq "Remove Attachments") {
- my @selected=$q->param("attachment_select");
+ my @selected=map { Encode::decode_utf8($_) } $q->param("attachment_select");
if (! @selected) {
error(gettext("Please select the attachments to remove."));
}
@@ -187,7 +190,9 @@ sub sessioncgi ($$) {
postremove($session);
}
elsif ($form->submitted eq 'Remove' && $form->validate) {
- my @pages=$q->param("page");
+ IkiWiki::checksessionexpiry($q, $session, $q->param('sid'));
+
+ my @pages=$form->field("page");
# Validate removal by checking that the page exists,
# and that the user is allowed to edit(/remove) it.
@@ -208,8 +213,10 @@ sub sessioncgi ($$) {
foreach my $file (@files) {
IkiWiki::rcs_remove($file);
}
- IkiWiki::rcs_commit_staged(gettext("removed"),
- $session->param("name"), $ENV{REMOTE_ADDR});
+ IkiWiki::rcs_commit_staged(
+ message => gettext("removed"),
+ session => $session,
+ );
IkiWiki::enable_commit_hook();
IkiWiki::rcs_update();
}
@@ -237,7 +244,7 @@ sub sessioncgi ($$) {
}
}
else {
- removal_confirm($q, $session, 0, $q->param("page"));
+ removal_confirm($q, $session, 0, $form->field("page"));
}
exit 0;
diff --git a/IkiWiki/Plugin/rename.pm b/IkiWiki/Plugin/rename.pm
index c3e03496f..61d39d4b5 100644
--- a/IkiWiki/Plugin/rename.pm
+++ b/IkiWiki/Plugin/rename.pm
@@ -18,6 +18,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "web",
},
}
@@ -49,7 +50,7 @@ sub check_canrename ($$$$$$) {
IkiWiki::check_canedit($src, $q, $session);
if ($attachment) {
if (IkiWiki::Plugin::attachment->can("check_canattach")) {
- IkiWiki::Plugin::attachment::check_canattach($session, $src, $srcfile);
+ IkiWiki::Plugin::attachment::check_canattach($session, $src, "$config{srcdir}/$srcfile");
}
else {
error("renaming of attachments is not allowed");
@@ -62,9 +63,8 @@ sub check_canrename ($$$$$$) {
error(gettext("no change to the file name was specified"));
}
- # Must be a legal filename, and not absolute.
- if (IkiWiki::file_pruned($destfile, $config{srcdir}) ||
- $destfile=~/^\//) {
+ # Must be a legal filename.
+ if (IkiWiki::file_pruned($destfile)) {
error(sprintf(gettext("illegal name")));
}
@@ -84,7 +84,7 @@ sub check_canrename ($$$$$$) {
if ($attachment) {
# Note that $srcfile is used here, not $destfile,
# because it wants the current file, to check it.
- IkiWiki::Plugin::attachment::check_canattach($session, $dest, $srcfile);
+ IkiWiki::Plugin::attachment::check_canattach($session, $dest, "$config{srcdir}/$srcfile");
}
}
@@ -126,11 +126,13 @@ sub rename_form ($$$) {
javascript => 0,
params => $q,
action => $config{cgiurl},
- stylesheet => IkiWiki::baseurl()."style.css",
+ stylesheet => 1,
fields => [qw{do page new_name attachment}],
);
$f->field(name => "do", type => "hidden", value => "rename", force => 1);
+ $f->field(name => "sid", type => "hidden", value => $session->id,
+ force => 1);
$f->field(name => "page", type => "hidden", value => $page, force => 1);
$f->field(name => "new_name", value => pagetitle($page, 1), size => 60);
if (!$q->param("attachment")) {
@@ -235,6 +237,7 @@ sub formbuilder (@) {
if (defined $form->field("do") && ($form->field("do") eq "edit" ||
$form->field("do") eq "create")) {
+ IkiWiki::decode_form_utf8($form);
my $q=$params{cgi};
my $session=$params{session};
@@ -242,7 +245,7 @@ sub formbuilder (@) {
rename_start($q, $session, 0, $form->field("page"));
}
elsif ($form->submitted eq "Rename Attachment") {
- my @selected=$q->param("attachment_select");
+ my @selected=map { Encode::decode_utf8($_) } $q->param("attachment_select");
if (@selected > 1) {
error(gettext("Only one attachment can be renamed at a time."));
}
@@ -278,21 +281,23 @@ sub sessioncgi ($$) {
if ($q->param("do") eq 'rename') {
my $session=shift;
- my ($form, $buttons)=rename_form($q, $session, $q->param("page"));
+ my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8($q->param("page")));
IkiWiki::decode_form_utf8($form);
if ($form->submitted eq 'Cancel') {
postrename($session);
}
elsif ($form->submitted eq 'Rename' && $form->validate) {
+ IkiWiki::checksessionexpiry($q, $session, $q->param('sid'));
+
			# Queue of rename actions to perform.
my @torename;
# These untaints are safe because of the checks
# performed in check_canrename later.
- my $src=$q->param("page");
+ my $src=$form->field("page");
my $srcfile=IkiWiki::possibly_foolish_untaint($pagesources{$src});
- my $dest=IkiWiki::possibly_foolish_untaint(titlepage($q->param("new_name")));
+ my $dest=IkiWiki::possibly_foolish_untaint(titlepage($form->field("new_name")));
my $destfile=$dest;
if (! $q->param("attachment")) {
my $type=$q->param('type');
@@ -344,8 +349,9 @@ sub sessioncgi ($$) {
$pagesources{$rename->{src}}=$rename->{destfile};
}
IkiWiki::rcs_commit_staged(
- sprintf(gettext("rename %s to %s"), $srcfile, $destfile),
- $session->param("name"), $ENV{REMOTE_ADDR}) if $config{rcs};
+ message => sprintf(gettext("rename %s to %s"), $srcfile, $destfile),
+ session => $session,
+ ) if $config{rcs};
# Then link fixups.
foreach my $rename (@torename) {
@@ -570,8 +576,8 @@ sub fixlinks ($$$) {
$file,
sprintf(gettext("update for rename of %s to %s"), $rename->{srcfile}, $rename->{destfile}),
$token,
- $session->param("name"),
- $ENV{REMOTE_ADDR}
+ $session->param("name"),
+ $session->remote_addr(),
);
push @fixedlinks, $page if ! defined $conflict;
}
diff --git a/IkiWiki/Plugin/repolist.pm b/IkiWiki/Plugin/repolist.pm
index f69ec3988..ba7c5f0aa 100644
--- a/IkiWiki/Plugin/repolist.pm
+++ b/IkiWiki/Plugin/repolist.pm
@@ -15,6 +15,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "web",
},
repositories => {
type => "string",
diff --git a/IkiWiki/Plugin/rsync.pm b/IkiWiki/Plugin/rsync.pm
new file mode 100644
index 000000000..e38801e4a
--- /dev/null
+++ b/IkiWiki/Plugin/rsync.pm
@@ -0,0 +1,45 @@
+#!/usr/bin/perl
+package IkiWiki::Plugin::rsync;
+
+use warnings;
+use strict;
+use IkiWiki 3.00;
+
+sub import {
+ hook(type => "getsetup", id => "rsync", call => \&getsetup);
+ hook(type => "change", id => "rsync", call => \&postrefresh);
+ hook(type => "delete", id => "rsync", call => \&postrefresh);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 0,
+ rebuild => 0,
+ },
+ rsync_command => {
+ type => "string",
+ example => "rsync -qa --delete . user\@host:/path/to/docroot/",
+ description => "command to run to sync updated pages",
+ safe => 0,
+ rebuild => 0,
+ },
+}
+
+my $ran=0;
+
+sub postrefresh () {
+ if (defined $config{rsync_command} && ! $ran) {
+ $ran=1;
+ chdir($config{destdir}) || error("chdir: $!");
+ system $config{rsync_command};
+ if ($? == -1) {
+			warn(sprintf(gettext("failed to execute rsync_command: %s"), $!)."\n");
+ }
+ elsif ($? != 0) {
+			warn(sprintf(gettext("rsync_command exited %d"), $? >> 8)."\n");
+ }
+ }
+}
+
+1
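A minimal sketch of enabling the new rsync plugin from a wiki setup file; only rsync_command itself comes from getsetup above, and the remote host and destination path below are hypothetical:

	# hypothetical fragment of an ikiwiki .setup file
	add_plugins => [qw{rsync}],
	# run from destdir after each refresh; remote host/path are made up
	rsync_command => "rsync -qa --delete . user\@example.org:/srv/www/wiki/",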
diff --git a/IkiWiki/Plugin/search.pm b/IkiWiki/Plugin/search.pm
index 393c17e0f..8fb9dff0c 100644
--- a/IkiWiki/Plugin/search.pm
+++ b/IkiWiki/Plugin/search.pm
@@ -10,9 +10,10 @@ sub import {
hook(type => "getsetup", id => "search", call => \&getsetup);
hook(type => "checkconfig", id => "search", call => \&checkconfig);
hook(type => "pagetemplate", id => "search", call => \&pagetemplate);
- hook(type => "postscan", id => "search", call => \&index);
+ hook(type => "indexhtml", id => "search", call => \&indexhtml);
hook(type => "delete", id => "search", call => \&delete);
hook(type => "cgi", id => "search", call => \&cgi);
+ hook(type => "disable", id => "search", call => \&disable);
}
sub getsetup () {
@@ -20,6 +21,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1,
+ section => "web",
},
omega_cgi => {
type => "string",
@@ -40,6 +42,10 @@ sub checkconfig () {
if (! defined $config{omega_cgi}) {
$config{omega_cgi}="/usr/lib/cgi-bin/omega/omega";
}
+
+ # This is a mass dependency, so if the search form template
+ # changes, every page is rebuilt.
+ add_depends("", "templates/searchform.tmpl");
}
my $form;
@@ -53,6 +59,7 @@ sub pagetemplate (@) {
if (! defined $form) {
my $searchform = template("searchform.tmpl", blind_cache => 1);
$searchform->param(searchaction => $config{cgiurl});
+ $searchform->param(html5 => $config{html5});
$form=$searchform->output;
}
@@ -62,14 +69,14 @@ sub pagetemplate (@) {
my $scrubber;
my $stemmer;
-sub index (@) {
+sub indexhtml (@) {
my %params=@_;
setupfiles();
# A unique pageterm is used to identify the document for a page.
my $pageterm=pageterm($params{page});
- return $params{content} unless defined $pageterm;
+ return unless defined $pageterm;
my $db=xapiandb();
my $doc=Search::Xapian::Document->new();
@@ -106,11 +113,17 @@ sub index (@) {
}
$sample=~s/\n/ /g;
+ my $url=urlto($params{destpage}, "");
+ if (defined $pagestate{$params{page}}{meta}{permalink}) {
+ $url=$pagestate{$params{page}}{meta}{permalink}
+ }
+
# data used by omega
# Decode html entities in it, since omega re-encodes them.
eval q{use HTML::Entities};
+ error $@ if $@;
$doc->set_data(
- "url=".urlto($params{page}, "")."\n".
+ "url=".$url."\n".
"sample=".decode_entities($sample)."\n".
"caption=".decode_entities($caption)."\n".
"modtime=$IkiWiki::pagemtime{$params{page}}\n".
@@ -177,15 +190,15 @@ sub pageterm ($) {
# 240 is the number used by omindex to decide when to hash an
# overlong term. This does not use a compatible hash method though.
if (length $page > 240) {
- eval q{use Digest::SHA1};
+ eval q{use Digest::SHA};
if ($@) {
- debug("search: ".sprintf(gettext("need Digest::SHA1 to index %s"), $page)) if $@;
+ debug("search: ".sprintf(gettext("need Digest::SHA to index %s"), $page)) if $@;
return undef;
}
# Note no colon, therefore it's guaranteed to not overlap
# with a page with the same name as the hash..
- return "U".lc(Digest::SHA1::sha1_hex($page));
+ return "U".lc(Digest::SHA::sha1_hex($page));
}
else {
return "U:".$page;
@@ -213,12 +226,30 @@ sub setupfiles () {
writefile("omega.conf", $config{wikistatedir}."/xapian",
"database_dir .\n".
"template_dir ./templates\n");
+
+ # Avoid omega interpreting anything in the misctemplate
+ # as an omegascript command.
+ my $misctemplate=IkiWiki::misctemplate(gettext("search"), "\0",
+ searchform => "", # avoid showing the small search form
+ );
+ eval q{use HTML::Entities};
+ error $@ if $@;
+ $misctemplate=encode_entities($misctemplate, '\$');
+
+ my $querytemplate=readfile(IkiWiki::template_file("searchquery.tmpl"));
+ $misctemplate=~s/\0/$querytemplate/;
+
writefile("query", $config{wikistatedir}."/xapian/templates",
- IkiWiki::misctemplate(gettext("search"),
- readfile(IkiWiki::template_file("searchquery.tmpl"))));
+ $misctemplate);
$setup=1;
}
}
}
+sub disable () {
+ if (-d $config{wikistatedir}."/xapian") {
+ system("rm", "-rf", $config{wikistatedir}."/xapian");
+ }
+}
+
1
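A hedged sketch of the renamed hook's interface as used above: indexhtml receives page, destpage and content, and unlike the old postscan its return value is ignored; the plugin id below is hypothetical.

	hook(type => "indexhtml", id => "myplugin", call => sub {
		my %params=@_;
		# %params contains page, destpage and the rendered content
		debug("indexing $params{page}");
	});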
diff --git a/IkiWiki/Plugin/shortcut.pm b/IkiWiki/Plugin/shortcut.pm
index 1840a5722..0cedbe447 100644
--- a/IkiWiki/Plugin/shortcut.pm
+++ b/IkiWiki/Plugin/shortcut.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
diff --git a/IkiWiki/Plugin/sidebar.pm b/IkiWiki/Plugin/sidebar.pm
index 41812e1c1..c1146b7b4 100644
--- a/IkiWiki/Plugin/sidebar.pm
+++ b/IkiWiki/Plugin/sidebar.pm
@@ -10,6 +10,7 @@ use IkiWiki 3.00;
sub import {
hook(type => "getsetup", id => "sidebar", call => \&getsetup);
+ hook(type => "preprocess", id => "sidebar", call => \&preprocess);
hook(type => "pagetemplate", id => "sidebar", call => \&pagetemplate);
}
@@ -19,11 +20,50 @@ sub getsetup () {
safe => 1,
rebuild => 1,
},
+ global_sidebars => {
+ type => "boolean",
+ example => 1,
+ description => "show sidebar page on all pages?",
+ safe => 1,
+ rebuild => 1,
+ },
+}
+
+my %pagesidebar;
+
+sub preprocess (@) {
+ my %params=@_;
+
+ my $page=$params{page};
+ return "" unless $page eq $params{destpage};
+
+ if (! defined $params{content}) {
+ $pagesidebar{$page}=undef;
+ }
+ else {
+ my $file = $pagesources{$page};
+ my $type = pagetype($file);
+
+ $pagesidebar{$page}=
+ IkiWiki::htmlize($page, $page, $type,
+ IkiWiki::linkify($page, $page,
+ IkiWiki::preprocess($page, $page, $params{content})));
+ }
+
+ return "";
}
+my $oldfile;
+my $oldcontent;
+
sub sidebar_content ($) {
my $page=shift;
+ return delete $pagesidebar{$page} if defined $pagesidebar{$page};
+
+ return if ! exists $pagesidebar{$page} &&
+ defined $config{global_sidebars} && ! $config{global_sidebars};
+
my $sidebar_page=bestlink($page, "sidebar") || return;
my $sidebar_file=$pagesources{$sidebar_page} || return;
my $sidebar_type=pagetype($sidebar_file);
@@ -34,7 +74,16 @@ sub sidebar_content ($) {
# currently requires a wiki rebuild.
add_depends($page, $sidebar_page);
- my $content=readfile(srcfile($sidebar_file));
+ my $content;
+ if (defined $oldfile && $sidebar_file eq $oldfile) {
+ $content=$oldcontent;
+ }
+ else {
+ $content=readfile(srcfile($sidebar_file));
+ $oldcontent=$content;
+ $oldfile=$sidebar_file;
+ }
+
return unless length $content;
return IkiWiki::htmlize($sidebar_page, $page, $sidebar_type,
IkiWiki::linkify($sidebar_page, $page,
@@ -47,11 +96,10 @@ sub sidebar_content ($) {
sub pagetemplate (@) {
my %params=@_;
- my $page=$params{page};
my $template=$params{template};
-
- if ($template->query(name => "sidebar")) {
- my $content=sidebar_content($page);
+ if ($params{destpage} eq $params{page} &&
+ $template->query(name => "sidebar")) {
+ my $content=sidebar_content($params{destpage});
if (defined $content && length $content) {
$template->param(sidebar => $content);
}
diff --git a/IkiWiki/Plugin/signinedit.pm b/IkiWiki/Plugin/signinedit.pm
index 032a0034c..31160c02f 100644
--- a/IkiWiki/Plugin/signinedit.pm
+++ b/IkiWiki/Plugin/signinedit.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "auth",
},
}
@@ -29,6 +30,7 @@ sub canedit ($$$) {
# signin can override this.
if (! defined $session->param("name") ||
! IkiWiki::userinfo_get($session->param("name"), "regdate")) {
+ return "" unless exists $IkiWiki::hooks{auth};
return sub { IkiWiki::needsignin($cgi, $session) };
}
else {
diff --git a/IkiWiki/Plugin/skeleton.pm.example b/IkiWiki/Plugin/skeleton.pm.example
index 573510191..a57a2c8fe 100644
--- a/IkiWiki/Plugin/skeleton.pm.example
+++ b/IkiWiki/Plugin/skeleton.pm.example
@@ -20,10 +20,11 @@ sub import {
hook(type => "scan", id => "skeleton", call => \&scan);
hook(type => "htmlize", id => "skeleton", call => \&htmlize);
hook(type => "sanitize", id => "skeleton", call => \&sanitize);
- hook(type => "postscan", id => "skeleton", call => \&postscan);
+ hook(type => "indexhtml", id => "skeleton", call => \&indexhtml);
hook(type => "format", id => "skeleton", call => \&format);
hook(type => "pagetemplate", id => "skeleton", call => \&pagetemplate);
hook(type => "templatefile", id => "skeleton", call => \&templatefile);
+ hook(type => "pageactions", id => "skeleton", call => \&pageactions);
hook(type => "delete", id => "skeleton", call => \&delete);
hook(type => "change", id => "skeleton", call => \&change);
hook(type => "cgi", id => "skeleton", call => \&cgi);
@@ -39,6 +40,8 @@ sub import {
hook(type => "renamepage", id => "skeleton", call => \&renamepage);
hook(type => "rename", id => "skeleton", call => \&rename);
hook(type => "savestate", id => "skeleton", call => \&savestate);
+ hook(type => "genwrapper", id => "skeleton", call => \&genwrapper);
+ hook(type => "disable", id => "skeleton", call => \&disable);
}
sub getopt () {
@@ -50,6 +53,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "misc",
},
skeleton => {
type => "boolean",
@@ -68,7 +72,7 @@ sub refresh () {
debug("skeleton plugin refresh");
}
-sub needsbuild () {
+sub needsbuild ($) {
debug("skeleton plugin needsbuild");
}
@@ -116,10 +120,10 @@ sub sanitize (@) {
return $params{content};
}
-sub postscan (@) {
+sub indexhtml (@) {
my %params=@_;
- debug("skeleton plugin running as postscan");
+ debug("skeleton plugin running as indexhtml");
}
sub format (@) {
@@ -145,6 +149,14 @@ sub templatefile (@) {
debug("skeleton plugin running as a templatefile hook");
}
+sub pageactions (@) {
+ my %params=@_;
+ my $page=$params{page};
+
+ debug("skeleton plugin running as a pageactions hook");
+ return ();
+}
+
sub delete (@) {
my @files=@_;
@@ -239,4 +251,12 @@ sub savestate () {
debug("skeleton plugin running in savestate");
}
+sub genwrapper () {
+ debug("skeleton plugin running in genwrapper");
+}
+
+sub disable () {

+ debug("skeleton plugin running in disable");
+}
+
1
diff --git a/IkiWiki/Plugin/smiley.pm b/IkiWiki/Plugin/smiley.pm
index 0d77916d0..6f4f49d18 100644
--- a/IkiWiki/Plugin/smiley.pm
+++ b/IkiWiki/Plugin/smiley.pm
@@ -25,7 +25,14 @@ sub getsetup () {
}
sub build_regexp () {
- my $list=readfile(srcfile("smileys.mdwn"));
+ my $srcfile = srcfile("smileys.mdwn", 1);
+ if (! defined $srcfile) {
+ print STDERR sprintf(gettext("smiley plugin will not work without %s"),
+ "smileys.mdwn")."\n";
+ $smiley_regexp='';
+ return;
+ }
+ my $list=readfile($srcfile);
while ($list =~ m/^\s*\*\s+\\\\([^\s]+)\s+\[\[([^]]+)\]\]/mg) {
my $smiley=$1;
my $file=$2;
diff --git a/IkiWiki/Plugin/sortnaturally.pm b/IkiWiki/Plugin/sortnaturally.pm
new file mode 100644
index 000000000..62e42767c
--- /dev/null
+++ b/IkiWiki/Plugin/sortnaturally.pm
@@ -0,0 +1,32 @@
+#!/usr/bin/perl
+# Sort::Naturally-powered title_natural sort order for IkiWiki
+package IkiWiki::Plugin::sortnaturally;
+
+use IkiWiki 3.00;
+no warnings;
+
+sub import {
+ hook(type => "getsetup", id => "sortnaturally", call => \&getsetup);
+}
+
+sub getsetup {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => undef,
+ },
+}
+
+sub checkconfig () {
+ eval q{use Sort::Naturally};
+ error $@ if $@;
+}
+
+package IkiWiki::SortSpec;
+
+sub cmp_title_natural {
+ Sort::Naturally::ncmp(IkiWiki::pagetitle(IkiWiki::basename($a)),
+ IkiWiki::pagetitle(IkiWiki::basename($b)))
+}
+
+1;
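A small sketch of what the title_natural order buys over a plain string sort, assuming Sort::Naturally is installed:

	use Sort::Naturally;
	# a plain cmp sorts "page10" before "page2"; ncmp compares digit runs numerically
	my @sorted=sort { ncmp($a, $b) } qw{page10 page2 page1};
	# @sorted is now ("page1", "page2", "page10")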
diff --git a/IkiWiki/Plugin/sparkline.pm b/IkiWiki/Plugin/sparkline.pm
index c1f016ffd..e28d2605a 100644
--- a/IkiWiki/Plugin/sparkline.pm
+++ b/IkiWiki/Plugin/sparkline.pm
@@ -24,6 +24,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -121,10 +122,10 @@ sub preprocess (@) {
# Use the sha1 of the php code that generates the sparkline as
# the base for its filename.
- eval q{use Digest::SHA1};
+ eval q{use Digest::SHA};
error($@) if $@;
my $fn=$params{page}."/sparkline-".
- IkiWiki::possibly_foolish_untaint(Digest::SHA1::sha1_hex($php)).
+ IkiWiki::possibly_foolish_untaint(Digest::SHA::sha1_hex($php)).
".png";
will_render($params{page}, $fn);
@@ -149,7 +150,7 @@ sub preprocess (@) {
waitpid $pid, 0;
$SIG{PIPE}="DEFAULT";
- if ($sigpipe) {
+ if ($sigpipe || ! defined $png) {
error gettext("failed to run php");
}
@@ -157,7 +158,8 @@ sub preprocess (@) {
writefile($fn, $config{destdir}, $png, 1);
}
else {
- # can't write the file, so embed it in a data uri
+ # in preview mode, embed the image in a data uri
+ # to avoid temp file clutter
eval q{use MIME::Base64};
error($@) if $@;
return "<img src=\"data:image/png;base64,".
diff --git a/IkiWiki/Plugin/svn.pm b/IkiWiki/Plugin/svn.pm
index 06b987f51..9cf82b5db 100644
--- a/IkiWiki/Plugin/svn.pm
+++ b/IkiWiki/Plugin/svn.pm
@@ -19,6 +19,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
}
sub checkconfig () {
@@ -44,6 +45,7 @@ sub getsetup () {
plugin => {
safe => 0, # rcs plugin
rebuild => undef,
+ section => "rcs",
},
svnrepo => {
type => "string",
@@ -142,43 +144,50 @@ sub rcs_prepedit ($) {
}
}
-sub rcs_commit ($$$;$$) {
+sub commitmessage (@) {
+ my %params=@_;
+
+ if (defined $params{session}) {
+ if (defined $params{session}->param("name")) {
+ return "web commit by ".
+ $params{session}->param("name").
+ (length $params{message} ? ": $params{message}" : "");
+ }
+ elsif (defined $params{session}->remote_addr()) {
+ return "web commit from ".
+ $params{session}->remote_addr().
+ (length $params{message} ? ": $params{message}" : "");
+ }
+ }
+ return $params{message};
+}
+
+sub rcs_commit (@) {
# Tries to commit the page; returns undef on _success_ and
# a version of the page with the rcs's conflict markers on failure.
# The file is relative to the srcdir.
- my $file=shift;
- my $message=shift;
- my $rcstoken=shift;
- my $user=shift;
- my $ipaddr=shift;
-
- if (defined $user) {
- $message="web commit by $user".(length $message ? ": $message" : "");
- }
- elsif (defined $ipaddr) {
- $message="web commit from $ipaddr".(length $message ? ": $message" : "");
- }
+ my %params=@_;
if (-d "$config{srcdir}/.svn") {
# Check to see if the page has been changed by someone
# else since rcs_prepedit was called.
- my ($oldrev)=$rcstoken=~/^([0-9]+)$/; # untaint
- my $rev=svn_info("Revision", "$config{srcdir}/$file");
+ my ($oldrev)=$params{token}=~/^([0-9]+)$/; # untaint
+ my $rev=svn_info("Revision", "$config{srcdir}/$params{file}");
if (defined $rev && defined $oldrev && $rev != $oldrev) {
# Merge their changes into the file that we've
# changed.
if (system("svn", "merge", "--quiet", "-r$oldrev:$rev",
- "$config{srcdir}/$file", "$config{srcdir}/$file") != 0) {
+ "$config{srcdir}/$params{file}", "$config{srcdir}/$params{file}") != 0) {
warn("svn merge -r$oldrev:$rev failed\n");
}
}
if (system("svn", "commit", "--quiet",
"--encoding", "UTF-8", "-m",
- IkiWiki::possibly_foolish_untaint($message),
+ IkiWiki::possibly_foolish_untaint(commitmessage(%params)),
$config{srcdir}) != 0) {
- my $conflict=readfile("$config{srcdir}/$file");
- if (system("svn", "revert", "--quiet", "$config{srcdir}/$file") != 0) {
+ my $conflict=readfile("$config{srcdir}/$params{file}");
+ if (system("svn", "revert", "--quiet", "$config{srcdir}/$params{file}") != 0) {
warn("svn revert failed\n");
}
return $conflict;
@@ -187,21 +196,14 @@ sub rcs_commit ($$$;$$) {
return undef # success
}
-sub rcs_commit_staged ($$$) {
+sub rcs_commit_staged (@) {
# Commits all staged changes. Changes can be staged using rcs_add,
# rcs_remove, and rcs_rename.
- my ($message, $user, $ipaddr)=@_;
-
- if (defined $user) {
- $message="web commit by $user".(length $message ? ": $message" : "");
- }
- elsif (defined $ipaddr) {
- $message="web commit from $ipaddr".(length $message ? ": $message" : "");
- }
+ my %params=@_;
if (system("svn", "commit", "--quiet",
"--encoding", "UTF-8", "-m",
- IkiWiki::possibly_foolish_untaint($message),
+ IkiWiki::possibly_foolish_untaint(commitmessage(%params)),
$config{srcdir}) != 0) {
warn("svn commit failed\n");
return 1; # failure
@@ -348,34 +350,58 @@ sub rcs_diff ($) {
return `svnlook diff $config{svnrepo} -r$rev --no-diff-deleted`;
}
-sub rcs_getctime ($) {
+{
+
+my ($lastfile, $lastmtime, $lastctime);
+
+sub findtimes ($) {
my $file=shift;
+ if (defined $lastfile && $lastfile eq $file) {
+ return $lastmtime, $lastctime;
+ }
+ $lastfile=$file;
+
my $svn_log_infoline=qr/^r\d+\s+\|\s+[^\s]+\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;
my $child = open(SVNLOG, "-|");
if (! $child) {
- exec("svn", "log", $file) || error("svn log $file failed to run");
+ exec("svn", "log", "$config{srcdir}/$file") || error("svn log failed to run");
}
- my $date;
+ my ($cdate, $mdate);
while (<SVNLOG>) {
if (/$svn_log_infoline/) {
- $date=$1;
+ $cdate=$1;
+ $mdate=$1 unless defined $mdate;
}
}
- close SVNLOG || warn "svn log $file exited $?";
+ close SVNLOG || error "svn log exited $?";
- if (! defined $date) {
- warn "failed to parse svn log for $file\n";
- return 0;
+ if (! defined $cdate) {
+ error "failed to parse svn log for $file";
}
eval q{use Date::Parse};
error($@) if $@;
- $date=str2time($date);
- debug("found ctime ".localtime($date)." for $file");
- return $date;
+
+ $lastctime=str2time($cdate);
+ $lastmtime=str2time($mdate);
+ return $lastmtime, $lastctime;
+}
+
+}
+
+sub rcs_getctime ($) {
+ my $file=shift;
+
+ return (findtimes($file))[1];
+}
+
+sub rcs_getmtime ($) {
+ my $file=shift;
+
+ return (findtimes($file))[0];
}
1
diff --git a/IkiWiki/Plugin/table.pm b/IkiWiki/Plugin/table.pm
index 96d63f455..2edd1eacd 100644
--- a/IkiWiki/Plugin/table.pm
+++ b/IkiWiki/Plugin/table.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
diff --git a/IkiWiki/Plugin/tag.pm b/IkiWiki/Plugin/tag.pm
index cdcfaf536..55064a9a3 100644
--- a/IkiWiki/Plugin/tag.pm
+++ b/IkiWiki/Plugin/tag.pm
@@ -6,8 +6,6 @@ use warnings;
use strict;
use IkiWiki 3.00;
-my %tags;
-
sub import {
hook(type => "getopt", id => "tag", call => \&getopt);
hook(type => "getsetup", id => "tag", call => \&getsetup);
@@ -36,12 +34,19 @@ sub getsetup () {
safe => 1,
rebuild => 1,
},
+ tag_autocreate => {
+ type => "boolean",
+ example => 1,
+ description => "autocreate new tag pages?",
+ safe => 1,
+ rebuild => undef,
+ },
}
-sub tagpage ($) {
+sub taglink ($) {
my $tag=shift;
-
- if ($tag !~ m{^\.?/} &&
+
+ if ($tag !~ m{^/} &&
defined $config{tagbase}) {
$tag="/".$config{tagbase}."/".$tag;
$tag=~y#/#/#s; # squash dups
@@ -50,13 +55,46 @@ sub tagpage ($) {
return $tag;
}
-sub taglink ($$$;@) {
+sub htmllink_tag ($$$;@) {
my $page=shift;
my $destpage=shift;
my $tag=shift;
my %opts=@_;
- return htmllink($page, $destpage, tagpage($tag), %opts);
+ return htmllink($page, $destpage, taglink($tag), %opts);
+}
+
+sub gentag ($) {
+ my $tag=shift;
+
+ if ($config{tag_autocreate} ||
+ ($config{tagbase} && ! defined $config{tag_autocreate})) {
+ my $tagpage=taglink($tag);
+ if ($tagpage=~/^\.\/(.*)/) {
+ $tagpage=$1;
+ }
+ else {
+ $tagpage=~s/^\///;
+ }
+
+ my $tagfile = newpagefile($tagpage, $config{default_pageext});
+
+ add_autofile($tagfile, "tag", sub {
+ my $message=sprintf(gettext("creating tag page %s"), $tagpage);
+ debug($message);
+
+ my $template=template("autotag.tmpl");
+ $template->param(tagname => IkiWiki::basename($tag));
+ $template->param(tag => $tag);
+ writefile($tagfile, $config{srcdir}, $template->output);
+ if ($config{rcs}) {
+ IkiWiki::disable_commit_hook();
+ IkiWiki::rcs_add($tagfile);
+ IkiWiki::rcs_commit_staged(message => $message);
+ IkiWiki::enable_commit_hook();
+ }
+ });
+ }
}
sub preprocess_tag (@) {
@@ -71,9 +109,11 @@ sub preprocess_tag (@) {
foreach my $tag (keys %params) {
$tag=linkpage($tag);
- $tags{$page}{$tag}=1;
+
# hidden WikiLink
- add_link($page, tagpage($tag));
+ add_link($page, taglink($tag), 'tag');
+
+ gentag($tag);
}
return "";
@@ -87,16 +127,16 @@ sub preprocess_taglink (@) {
return join(" ", map {
if (/(.*)\|(.*)/) {
my $tag=linkpage($2);
- $tags{$params{page}}{$tag}=1;
- add_link($params{page}, tagpage($tag));
- return taglink($params{page}, $params{destpage}, $tag,
+ add_link($params{page}, taglink($tag), 'tag');
+ gentag($tag);
+ return htmllink_tag($params{page}, $params{destpage}, $tag,
linktext => pagetitle($1));
}
else {
my $tag=linkpage($_);
- $tags{$params{page}}{$tag}=1;
- add_link($params{page}, tagpage($tag));
- return taglink($params{page}, $params{destpage}, $tag);
+ add_link($params{page}, taglink($tag), 'tag');
+ gentag($tag);
+ return htmllink_tag($params{page}, $params{destpage}, $tag);
}
}
grep {
@@ -110,17 +150,19 @@ sub pagetemplate (@) {
my $destpage=$params{destpage};
my $template=$params{template};
+ my $tags = $typedlinks{$page}{tag};
+
$template->param(tags => [
map {
- link => taglink($page, $destpage, $_, rel => "tag")
- }, sort keys %{$tags{$page}}
- ]) if exists $tags{$page} && %{$tags{$page}} && $template->query(name => "tags");
+ link => htmllink_tag($page, $destpage, $_, rel => "tag")
+ }, sort keys %$tags
+ ]) if defined $tags && %$tags && $template->query(name => "tags");
if ($template->query(name => "categories")) {
# It's an rss/atom template. Add any categories.
- if (exists $tags{$page} && %{$tags{$page}}) {
+ if (defined $tags && %$tags) {
$template->param(categories => [map { category => $_ },
- sort keys %{$tags{$page}}]);
+ sort keys %$tags]);
}
}
}
@@ -128,9 +170,9 @@ sub pagetemplate (@) {
package IkiWiki::PageSpec;
sub match_tagged ($$;@) {
- my $page = shift;
- my $glob = shift;
- return match_link($page, IkiWiki::Plugin::tag::tagpage($glob));
+ my $page=shift;
+ my $glob=IkiWiki::Plugin::tag::taglink(shift);
+ return match_link($page, $glob, linktype => 'tag', @_);
}
1
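For context, the new tag_autocreate behaviour is driven purely by setup options; a hypothetical setup fragment enabling it explicitly might look like:

	# hypothetical fragment of an ikiwiki .setup file
	add_plugins => [qw{tag}],
	tagbase => "tags",	# tag links resolve under /tags/
	tag_autocreate => 1,	# create tags/foo from autotag.tmpl on first use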
diff --git a/IkiWiki/Plugin/template.pm b/IkiWiki/Plugin/template.pm
index b6097bb49..db26bfe31 100644
--- a/IkiWiki/Plugin/template.pm
+++ b/IkiWiki/Plugin/template.pm
@@ -5,7 +5,6 @@ package IkiWiki::Plugin::template;
use warnings;
use strict;
use IkiWiki 3.00;
-use HTML::Template;
use Encode;
sub import {
@@ -19,63 +18,57 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
sub preprocess (@) {
my %params=@_;
+ # This needs to run even in scan mode, in order to process
+ # links and other metadata included via the template.
+ my $scan=! defined wantarray;
+
if (! exists $params{id}) {
error gettext("missing id parameter")
}
- my $template_page="templates/$params{id}";
- add_depends($params{page}, $template_page);
-
- my $template_file=$pagesources{$template_page};
- return sprintf(gettext("template %s not found"),
- htmllink($params{page}, $params{destpage}, "/".$template_page))
- unless defined $template_file;
-
+ # The bare id is used, so a page templates/$id can be used as
+ # the template.
my $template;
eval {
- $template=HTML::Template->new(
- filter => sub {
- my $text_ref = shift;
- $$text_ref=&Encode::decode_utf8($$text_ref);
- chomp $$text_ref;
- },
- filename => srcfile($template_file),
- die_on_bad_params => 0,
- no_includes => 1,
- blind_cache => 1,
- );
+ $template=template_depends($params{id}, $params{page},
+ blind_cache => 1);
};
if ($@) {
- error gettext("failed to process:")." $@"
+ error gettext("failed to process template:")." $@";
+ }
+ if (! $template) {
+ error sprintf(gettext("%s not found"),
+ htmllink($params{page}, $params{destpage},
+ "/templates/$params{id}"))
}
$params{basename}=IkiWiki::basename($params{page});
foreach my $param (keys %params) {
+ my $value=IkiWiki::preprocess($params{page}, $params{destpage},
+ $params{$param}, $scan);
if ($template->query(name => $param)) {
- $template->param($param =>
- IkiWiki::htmlize($params{page}, $params{destpage},
+ my $htmlvalue=IkiWiki::htmlize($params{page}, $params{destpage},
pagetype($pagesources{$params{page}}),
- $params{$param}));
+ $value);
+ chomp $htmlvalue;
+ $template->param($param => $htmlvalue);
}
if ($template->query(name => "raw_$param")) {
- $template->param("raw_$param" => $params{$param});
+ chomp $value;
+ $template->param("raw_$param" => $value);
}
}
- # This needs to run even in scan mode, in order to process
- # links and other metadata includes via the template.
- my $scan=! defined wantarray;
-
return IkiWiki::preprocess($params{page}, $params{destpage},
- IkiWiki::filter($params{page}, $params{destpage},
- $template->output), $scan);
+ $template->output, $scan);
}
1
diff --git a/IkiWiki/Plugin/teximg.pm b/IkiWiki/Plugin/teximg.pm
index dba5372b5..521af499f 100644
--- a/IkiWiki/Plugin/teximg.pm
+++ b/IkiWiki/Plugin/teximg.pm
@@ -8,6 +8,7 @@ use strict;
use Digest::MD5 qw(md5_hex);
use File::Temp qw(tempdir);
use HTML::Entities;
+use Encode;
use IkiWiki 3.00;
my $default_prefix = <<EOPREFIX;
@@ -31,6 +32,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
teximg_dvipng => {
type => "boolean",
@@ -69,13 +71,7 @@ sub preprocess (@) {
if (! defined $code && ! length $code) {
error gettext("missing tex code");
}
-
- if (check($code)) {
- return create($code, check_height($height), \%params);
- }
- else {
- error gettext("code includes disallowed latex commands")
- }
+ return create($code, check_height($height), \%params);
}
sub check_height ($) {
@@ -108,7 +104,7 @@ sub create ($$$) {
$height = 12;
}
- my $digest = md5_hex($code, $height);
+ my $digest = md5_hex(Encode::encode_utf8($code), $height);
my $imglink= $params->{page} . "/$digest.png";
my $imglog = $params->{page} . "/$digest.log";
@@ -155,7 +151,7 @@ sub gen_image ($$$$) {
my $tmp = eval { create_tmp_dir($digest) };
if (! $@ &&
writefile("$digest.tex", $tmp, $tex) &&
- system("cd $tmp; latex --interaction=nonstopmode $tmp/$digest.tex > /dev/null") == 0 &&
+ system("cd $tmp; shell_escape=f openout_any=p openin_any=p latex --interaction=nonstopmode $digest.tex < /dev/null > /dev/null") == 0 &&
# ensure destination directory exists
writefile("$imagedir/$digest.png", $config{destdir}, "") &&
(($config{teximg_dvipng} &&
@@ -191,34 +187,4 @@ sub create_tmp_dir ($) {
return $tmpdir;
}
-sub check ($) {
- # Check if the code is ok
- my $code = shift;
-
- my @badthings = (
- qr/\$\$/,
- qr/\\include/,
- qr/\\includegraphic/,
- qr/\\usepackage/,
- qr/\\newcommand/,
- qr/\\renewcommand/,
- qr/\\def/,
- qr/\\input/,
- qr/\\open/,
- qr/\\loop/,
- qr/\\errorstopmode/,
- qr/\\scrollmode/,
- qr/\\batchmode/,
- qr/\\read/,
- qr/\\write/,
- );
-
- foreach my $thing (@badthings) {
- if ($code =~ m/$thing/ ) {
- return 0;
- }
- }
- return 1;
-}
-
1
diff --git a/IkiWiki/Plugin/textile.pm b/IkiWiki/Plugin/textile.pm
index 8cc5a7951..56bb4bffc 100644
--- a/IkiWiki/Plugin/textile.pm
+++ b/IkiWiki/Plugin/textile.pm
@@ -19,6 +19,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
}
diff --git a/IkiWiki/Plugin/theme.pm b/IkiWiki/Plugin/theme.pm
new file mode 100644
index 000000000..03b0816ed
--- /dev/null
+++ b/IkiWiki/Plugin/theme.pm
@@ -0,0 +1,65 @@
+#!/usr/bin/perl
+package IkiWiki::Plugin::theme;
+
+use warnings;
+use strict;
+use IkiWiki 3.00;
+
+sub import {
+ hook(type => "getsetup", id => "theme", call => \&getsetup);
+ hook(type => "checkconfig", id => "theme", call => \&checkconfig);
+ hook(type => "needsbuild", id => "theme", call => \&needsbuild);
+}
+
+sub getsetup () {
+ return
+ plugin => {
+ safe => 1,
+ rebuild => 0,
+ section => "web",
+ },
+ theme => {
+ type => "string",
+ example => "actiontabs",
+ description => "name of theme to enable",
+ safe => 1,
+ rebuild => 0,
+ },
+}
+
+my $added=0;
+sub checkconfig () {
+ if (! $added && exists $config{theme} && $config{theme} =~ /^\w+$/) {
+ add_underlay("themes/".$config{theme});
+ $added=1;
+ }
+}
+
+sub needsbuild ($) {
+ my $needsbuild=shift;
+ if (($config{theme} || '') ne ($wikistate{theme}{currenttheme} || '')) {
+ # theme changed; ensure all files in the theme are built
+ my %needsbuild=map { $_ => 1 } @$needsbuild;
+ if ($config{theme}) {
+ foreach my $file (glob("$config{underlaydirbase}/themes/$config{theme}/*")) {
+ if (-f $file) {
+ my $f=IkiWiki::basename($file);
+ push @$needsbuild, $f
+ unless $needsbuild{$f};
+ }
+ }
+ }
+ elsif ($wikistate{theme}{currenttheme}) {
+ foreach my $file (glob("$config{underlaydirbase}/themes/$wikistate{theme}{currenttheme}/*")) {
+ my $f=IkiWiki::basename($file);
+ if (-f $file && defined eval { srcfile($f) }) {
+ push @$needsbuild, $f;
+ }
+ }
+ }
+
+ $wikistate{theme}{currenttheme}=$config{theme};
+ }
+}
+
+1
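A minimal sketch of enabling the new theme plugin from a setup file, using the theme name given as the example in getsetup:

	# hypothetical fragment of an ikiwiki .setup file
	add_plugins => [qw{theme}],
	theme => "actiontabs",	# adds the themes/actiontabs underlay at checkconfig time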
diff --git a/IkiWiki/Plugin/tla.pm b/IkiWiki/Plugin/tla.pm
index f4b20a6ec..da4385446 100644
--- a/IkiWiki/Plugin/tla.pm
+++ b/IkiWiki/Plugin/tla.pm
@@ -18,6 +18,7 @@ sub import {
hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+ hook(type => "rcs", id => "rcs_getmtime", call => \&rcs_getmtime);
}
sub checkconfig () {
@@ -34,6 +35,7 @@ sub getsetup () {
plugin => {
safe => 0, # rcs plugin
rebuild => undef,
+ section => "rcs",
},
tla_wrapper => {
type => "string",
@@ -96,18 +98,23 @@ sub rcs_prepedit ($) {
}
}
-sub rcs_commit ($$$;$$) {
- my $file=shift;
- my $message=shift;
- my $rcstoken=shift;
- my $user=shift;
- my $ipaddr=shift;
+sub rcs_commit (@) {
+ my %params=@_;
- if (defined $user) {
- $message="web commit by $user".(length $message ? ": $message" : "");
- }
- elsif (defined $ipaddr) {
- $message="web commit from $ipaddr".(length $message ? ": $message" : "");
+ my ($file, $message, $rcstoken)=
+ ($params{file}, $params{message}, $params{token});
+
+ if (defined $params{session}) {
+ if (defined $params{session}->param("name")) {
+ $message="web commit by ".
+ $params{session}->param("name").
+ (length $message ? ": $message" : "");
+ }
+ elsif (defined $params{session}->remote_addr()) {
+ $message="web commit from ".
+ $params{session}->remote_addr().
+ (length $message ? ": $message" : "");
+ }
}
if (-d "$config{srcdir}/{arch}") {
@@ -137,10 +144,10 @@ sub rcs_commit ($$$;$$) {
return undef # success
}
-sub rcs_commit_staged ($$$) {
+sub rcs_commit_staged (@) {
# Commits all staged changes. Changes can be staged using rcs_add,
# rcs_remove, and rcs_rename.
- my ($message, $user, $ipaddr)=@_;
+ my %params=@_;
error("rcs_commit_staged not implemented for tla"); # TODO
}
@@ -161,7 +168,7 @@ sub rcs_remove ($) {
error("rcs_remove not implemented for tla"); # TODO
}
-sub rcs_rename ($$) { # {{{a
+sub rcs_rename ($$) {
my ($src, $dest) = @_;
error("rcs_rename not implemented for tla"); # TODO
@@ -283,4 +290,8 @@ sub rcs_getctime ($) {
return $date;
}
+sub rcs_getmtime ($) {
+ error "rcs_getmtime is not implemented for tla\n"; # TODO
+}
+
1
diff --git a/IkiWiki/Plugin/toc.pm b/IkiWiki/Plugin/toc.pm
index a585564e7..ac07b9af6 100644
--- a/IkiWiki/Plugin/toc.pm
+++ b/IkiWiki/Plugin/toc.pm
@@ -18,6 +18,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -53,8 +54,8 @@ sub format (@) {
my $page="";
my $index="";
my %anchors;
- my $curlevel;
- my $startlevel=0;
+ my $startlevel=($params{startlevel} ? $params{startlevel} : 0);
+ my $curlevel=$startlevel-1;
my $liststarted=0;
my $indent=sub { "\t" x $curlevel };
$p->handler(start => sub {
@@ -65,12 +66,17 @@ sub format (@) {
my $anchor="index".++$anchors{$level}."h$level";
$page.="$text<a name=\"$anchor\"></a>";
- # Take the first header level seen as the topmost level,
+ # Unless we're given startlevel as a parameter,
+ # take the first header level seen as the topmost level,
# even if there are higher levels seen later on.
if (! $startlevel) {
$startlevel=$level;
$curlevel=$startlevel-1;
}
+ elsif (defined $params{startlevel} &&
+ $level < $params{startlevel}) {
+ return;
+ }
elsif ($level < $startlevel) {
$level=$startlevel;
}
diff --git a/IkiWiki/Plugin/toggle.pm b/IkiWiki/Plugin/toggle.pm
index aae8cdf84..1f93f87fe 100644
--- a/IkiWiki/Plugin/toggle.pm
+++ b/IkiWiki/Plugin/toggle.pm
@@ -20,6 +20,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
@@ -49,8 +50,7 @@ sub preprocess_toggleable (@) {
# Preprocess the text to expand any preprocessor directives
# embedded inside it.
- $params{text}=IkiWiki::preprocess($params{page}, $params{destpage},
- IkiWiki::filter($params{page}, $params{destpage}, $params{text}));
+ $params{text}=IkiWiki::preprocess($params{page}, $params{destpage}, $params{text});
my $id=genid($params{page}, $params{id});
my $class=(lc($params{open}) ne "yes") ? "toggleable" : "toggleable-open";
@@ -68,8 +68,8 @@ sub format (@) {
if ($params{content}=~s!(<div class="toggleable(?:-open)?" id="[^"]+">\s*)</div>!$1!g) {
$params{content}=~s/<div class="toggleableend">//g;
- if (! ($params{content}=~s!^(<body>)!$1.include_javascript($params{page})!em)) {
- # no </body> tag, probably in preview mode
+ if (! ($params{content}=~s!^(<body[^>]*>)!$1.include_javascript($params{page})!em)) {
+ # no <body> tag, probably in preview mode
$params{content}=include_javascript($params{page}, 1).$params{content};
}
}
@@ -80,9 +80,9 @@ sub include_javascript ($;$) {
my $page=shift;
my $absolute=shift;
- return '<script src="'.urlto("ikiwiki.js", $page, $absolute).
+ return '<script src="'.urlto("ikiwiki/ikiwiki.js", $page, $absolute).
'" type="text/javascript" charset="utf-8"></script>'."\n".
- '<script src="'.urlto("toggle.js", $page, $absolute).
+ '<script src="'.urlto("ikiwiki/toggle.js", $page, $absolute).
'" type="text/javascript" charset="utf-8"></script>';
}
diff --git a/IkiWiki/Plugin/txt.pm b/IkiWiki/Plugin/txt.pm
index 8599bdc8e..0d9a0b35b 100644
--- a/IkiWiki/Plugin/txt.pm
+++ b/IkiWiki/Plugin/txt.pm
@@ -29,6 +29,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 1, # format plugin
+ section => "format",
},
}
@@ -38,7 +39,14 @@ sub filter (@) {
my %params = @_;
my $content = $params{content};
- if (defined $pagesources{$params{page}} && $pagesources{$params{page}} =~ /\.txt$/) {
+ if (defined $pagesources{$params{page}} &&
+ $pagesources{$params{page}} =~ /\.txt$/) {
+ if ($pagesources{$params{page}} eq 'robots.txt' &&
+ $params{page} eq $params{destpage}) {
+ will_render($params{page}, 'robots.txt');
+ writefile('robots.txt', $config{destdir}, $content);
+ }
+
encode_entities($content, "<>&");
if ($findurl) {
my $finder = URI::Find->new(sub {
diff --git a/IkiWiki/Plugin/typography.pm b/IkiWiki/Plugin/typography.pm
index f62be82bb..9389b24d4 100644
--- a/IkiWiki/Plugin/typography.pm
+++ b/IkiWiki/Plugin/typography.pm
@@ -9,7 +9,7 @@ use IkiWiki 3.00;
sub import {
hook(type => "getopt", id => "typography", call => \&getopt);
hook(type => "getsetup", id => "typography", call => \&getsetup);
- IkiWiki::hook(type => "sanitize", id => "typography", call => \&sanitize);
+ hook(type => "sanitize", id => "typography", call => \&sanitize);
}
sub getopt () {
diff --git a/IkiWiki/Plugin/underlay.pm b/IkiWiki/Plugin/underlay.pm
index 380d418fb..3ea19c635 100644
--- a/IkiWiki/Plugin/underlay.pm
+++ b/IkiWiki/Plugin/underlay.pm
@@ -21,7 +21,7 @@ sub getsetup () {
},
add_underlays => {
type => "string",
- default => [],
+ example => ["$ENV{HOME}/wiki.underlay"],
description => "extra underlay directories to add",
advanced => 1,
safe => 0,
@@ -30,10 +30,10 @@ sub getsetup () {
}
sub checkconfig () {
- return unless exists $config{add_underlays};
-
- foreach my $dir (@{$config{add_underlays}}) {
- add_underlay($dir);
+ if ($config{add_underlays}) {
+ foreach my $dir (@{$config{add_underlays}}) {
+ add_underlay($dir);
+ }
}
}
diff --git a/IkiWiki/Plugin/version.pm b/IkiWiki/Plugin/version.pm
index 587cd55fa..c13643478 100644
--- a/IkiWiki/Plugin/version.pm
+++ b/IkiWiki/Plugin/version.pm
@@ -17,6 +17,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => undef,
+ section => "widget",
},
}
diff --git a/IkiWiki/Plugin/websetup.pm b/IkiWiki/Plugin/websetup.pm
index 9edd22d26..11b4428e3 100644
--- a/IkiWiki/Plugin/websetup.pm
+++ b/IkiWiki/Plugin/websetup.pm
@@ -18,6 +18,7 @@ sub getsetup () {
plugin => {
safe => 1,
rebuild => 0,
+ section => "web",
},
websetup_force_plugins => {
type => "string",
@@ -26,6 +27,13 @@ sub getsetup () {
safe => 0,
rebuild => 0,
},
+ websetup_unsafe => {
+ type => "string",
+ example => [],
+ description => "list of additional setup field keys to treat as unsafe",
+ safe => 0,
+ rebuild => 0,
+ },
websetup_show_unsafe => {
type => "boolean",
example => 1,
@@ -56,6 +64,12 @@ sub formatexample ($$) {
}
}
+sub issafe ($) {
+ my $key=shift;
+
+ return ! grep { $_ eq $key } @{$config{websetup_unsafe}};
+}
+
sub showfields ($$$@) {
my $form=shift;
my $plugin=shift;
@@ -66,27 +80,30 @@ sub showfields ($$$@) {
while (@_) {
my $key=shift;
my %info=%{shift()};
+
+ if ($key eq 'plugin') {
+ %plugininfo=%info;
+ next;
+ }
# skip internal settings
next if defined $info{type} && $info{type} eq "internal";
# XXX hashes not handled yet
next if ref $config{$key} && ref $config{$key} eq 'HASH' || ref $info{example} eq 'HASH';
# maybe skip unsafe settings
- next if ! $info{safe} && ! ($config{websetup_show_unsafe} && $config{websetup_advanced});
+ next if ! ($config{websetup_show_unsafe} && $config{websetup_advanced}) &&
+ (! $info{safe} || ! issafe($key));
# maybe skip advanced settings
next if $info{advanced} && ! $config{websetup_advanced};
# these are handled specially, so don't show
next if $key eq 'add_plugins' || $key eq 'disable_plugins';
- if ($key eq 'plugin') {
- %plugininfo=%info;
- next;
- }
-
push @show, $key, \%info;
}
- my $section=defined $plugin ? $plugin." ".gettext("plugin") : "main";
+ my $section=defined $plugin
+ ? sprintf(gettext("%s plugin:"), $plugininfo{section})." ".$plugin
+ : "main";
my %enabledfields;
my $shownfields=0;
@@ -97,6 +114,16 @@ sub showfields ($$$@) {
@show=();
}
+ my $section_fieldset;
+ if (defined $plugin) {
+ # Define the combined fieldset for the plugin's section.
+ # This ensures that this fieldset comes first.
+ $section_fieldset=sprintf(gettext("%s plugins"), $plugininfo{section});
+ $form->field(name => "placeholder.$plugininfo{section}",
+ type => "hidden",
+ fieldset => $section_fieldset);
+ }
+
# show plugin toggle
if (defined $plugin && (! $plugin_forced || $config{websetup_advanced})) {
my $name="enable.$plugin";
@@ -137,9 +164,16 @@ sub showfields ($$$@) {
my $name=defined $plugin ? $plugin.".".$key : $section.".".$key;
my $value=$config{$key};
+ if (! defined $value) {
+ $value="";
+ }
- if ($info{safe} && (ref $value eq 'ARRAY' || ref $info{example} eq 'ARRAY')) {
- $value=[(ref $value eq 'ARRAY' ? @{$value} : ""), "", ""]; # blank items for expansion
+ if (ref $value eq 'ARRAY' || ref $info{example} eq 'ARRAY') {
+ $value=[(ref $value eq 'ARRAY' ? map { Encode::encode_utf8($_) } @{$value} : "")];
+ push @$value, "", "" if $info{safe} && issafe($key); # blank items for expansion
+ }
+ else {
+ $value=Encode::encode_utf8($value);
}
if ($info{type} eq "string") {
@@ -190,7 +224,7 @@ sub showfields ($$$@) {
}
}
- if (! $info{safe}) {
+ if (! $info{safe} || ! issafe($key)) {
$form->field(name => $name, disabled => 1);
}
else {
@@ -199,11 +233,11 @@ sub showfields ($$$@) {
$shownfields++;
}
- # if no fields were shown for the plugin, drop it into the
- # plugins fieldset
+ # if no fields were shown for the plugin, drop it into a combined
+ # fieldset for its section
if (defined $plugin && (! $plugin_forced || $config{websetup_advanced}) &&
! $shownfields) {
- $form->field(name => "enable.$plugin", fieldset => "plugins");
+ $form->field(name => "enable.$plugin", fieldset => $section_fieldset);
}
return %enabledfields;
@@ -219,18 +253,16 @@ sub enable_plugin ($) {
sub disable_plugin ($) {
my $plugin=shift;
- if (grep { $_ eq $plugin } @{$config{add_plugins}}) {
- $config{add_plugins}=[grep { $_ ne $plugin } @{$config{add_plugins}}];
- }
- else {
- push @{$config{disable_plugins}}, $plugin;
- }
+ $config{add_plugins}=[grep { $_ ne $plugin } @{$config{add_plugins}}];
+ push @{$config{disable_plugins}}, $plugin;
}
sub showform ($$) {
my $cgi=shift;
my $session=shift;
+ IkiWiki::needsignin($cgi, $session);
+
if (! defined $session->param("name") ||
! IkiWiki::is_admin($session->param("name"))) {
error(gettext("you are not logged in as an admin"));
@@ -254,11 +286,10 @@ sub showform ($$) {
params => $cgi,
fieldsets => [
[main => gettext("main")],
- [plugins => gettext("plugins")]
],
action => $config{cgiurl},
template => {type => 'div'},
- stylesheet => IkiWiki::baseurl()."style.css",
+ stylesheet => 1,
);
$form->field(name => "do", type => "hidden", value => "setup",
@@ -290,7 +321,6 @@ sub showform ($$) {
shift->(form => $form, cgi => $cgi, session => $session,
buttons => $buttons);
});
- IkiWiki::decode_form_utf8($form);
my %fields=showfields($form, undef, undef, IkiWiki::getsetup());
@@ -308,6 +338,8 @@ sub showform ($$) {
$fields{$_}=$shown{$_} foreach keys %shown;
}
}
+
+ IkiWiki::decode_form_utf8($form);
if ($form->submitted eq "Cancel") {
IkiWiki::redirect($cgi, $config{url});
@@ -326,7 +358,7 @@ sub showform ($$) {
@value=0;
}
- if (! $info{safe}) {
+ if (! $info{safe} || ! issafe($key)) {
error("unsafe field $key"); # should never happen
}
@@ -357,7 +389,11 @@ sub showform ($$) {
@value=sort grep { length $_ } @value;
my @oldvalue=sort grep { length $_ }
(defined $config{$key} ? @{$config{$key}} : ());
- if ((@oldvalue) == (@value)) {
+ my $same=(@oldvalue) == (@value);
+ for (my $x=0; $same && $x < @value; $x++) {
+ $same=0 if $value[$x] ne $oldvalue[$x];
+ }
+ if ($same) {
delete $rebuild{$field};
}
else {
@@ -410,8 +446,8 @@ sub showform ($$) {
IkiWiki::unlockwiki();
# Print the top part of a standard misctemplate,
- # then show the rebuild or refresh.
- my $divider="xxx";
+ # then show the rebuild or refresh, live.
+ my $divider="\0";
my $html=IkiWiki::misctemplate("setup", $divider);
IkiWiki::printheader($session);
my ($head, $tail)=split($divider, $html, 2);
@@ -463,9 +499,10 @@ sub formbuilder_setup (@) {
my %params=@_;
my $form=$params{form};
- if ($form->title eq "preferences") {
- push @{$params{buttons}}, "Wiki Setup";
- if ($form->submitted && $form->submitted eq "Wiki Setup") {
+ if ($form->title eq "preferences" &&
+ IkiWiki::is_admin($params{session}->param("name"))) {
+ push @{$params{buttons}}, "Setup";
+ if ($form->submitted && $form->submitted eq "Setup") {
showform($params{cgi}, $params{session});
exit;
}
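As a sketch of the new websetup_unsafe option: it lists additional setup keys to keep out of web editing even if their plugin marks them safe; the keys shown are hypothetical.

	# hypothetical fragment of an ikiwiki .setup file
	websetup_unsafe => [qw{exclude locked_pages}],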
diff --git a/IkiWiki/Plugin/wikitext.pm b/IkiWiki/Plugin/wikitext.pm
index accb03bbe..b24630b15 100644
--- a/IkiWiki/Plugin/wikitext.pm
+++ b/IkiWiki/Plugin/wikitext.pm
@@ -16,6 +16,7 @@ sub getsetup () {
plugin => {
safe => 0, # format plugin
rebuild => undef,
+ section => "format",
},
}
diff --git a/IkiWiki/Plugin/wmd.pm b/IkiWiki/Plugin/wmd.pm
index 9ddd237ab..71d7c9d17 100644
--- a/IkiWiki/Plugin/wmd.pm
+++ b/IkiWiki/Plugin/wmd.pm
@@ -4,8 +4,6 @@ package IkiWiki::Plugin::wmd;
use warnings;
use strict;
use IkiWiki 3.00;
-use POSIX;
-use Encode;
sub import {
add_underlay("wmd");
@@ -17,6 +15,8 @@ sub getsetup () {
return
plugin => {
safe => 1,
+ rebuild => 0,
+ section => "web",
},
}
diff --git a/IkiWiki/Receive.pm b/IkiWiki/Receive.pm
index 37b6f2a62..fdd463025 100644
--- a/IkiWiki/Receive.pm
+++ b/IkiWiki/Receive.pm
@@ -1,5 +1,4 @@
#!/usr/bin/perl
-
package IkiWiki::Receive;
use warnings;
@@ -20,9 +19,9 @@ sub trusted () {
! grep { $_ eq $user } @{$config{untrusted_committers}};
}
-sub gen_wrapper () {
+sub genwrapper () {
# Test for commits from untrusted committers in the wrapper, to
- # avoid loading ikiwiki at all for trusted commits.
+ # avoid starting ikiwiki proper at all for trusted commits.
my $ret=<<"EOF";
{
@@ -37,6 +36,8 @@ EOF
"u != $uid";
} @{$config{untrusted_committers}}).
") exit(0);\n";
+
+
$ret.=<<"EOF";
asprintf(&s, "CALLER_UID=%i", u);
newenviron[i++]=s;
@@ -56,7 +57,6 @@ sub test () {
eval q{use CGI};
error($@) if $@;
my $cgi=CGI->new;
- $ENV{REMOTE_ADDR}='unknown' unless exists $ENV{REMOTE_ADDR};
# And dummy up a session object.
require IkiWiki::CGI;
@@ -81,7 +81,7 @@ sub test () {
my ($file)=$change->{file}=~/$config{wiki_file_regexp}/;
$file=IkiWiki::possibly_foolish_untaint($file);
if (! defined $file || ! length $file ||
- IkiWiki::file_pruned($file, $config{srcdir})) {
+ IkiWiki::file_pruned($file)) {
error(gettext("bad file name %s"), $file);
}
@@ -114,7 +114,7 @@ sub test () {
# by not testing the removal in such pairs of changes.
# (The add is still tested, just to make sure that
# no data is added to the repo that a web edit
- # could add.)
+ # could not add.)
next if $newfiles{$file};
if (IkiWiki::Plugin::remove->can("check_canremove")) {
diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm
index 5cb67ea07..a653ab2da 100644
--- a/IkiWiki/Render.pm
+++ b/IkiWiki/Render.pm
@@ -7,27 +7,32 @@ use strict;
use IkiWiki;
use Encode;
-my %backlinks;
-my $backlinks_calculated=0;
+my (%backlinks, %rendered);
+our %brokenlinks;
+my $links_calculated=0;
-sub calculate_backlinks () {
- return if $backlinks_calculated;
- %backlinks=();
+sub calculate_links () {
+ return if $links_calculated;
+ %backlinks=%brokenlinks=();
foreach my $page (keys %links) {
foreach my $link (@{$links{$page}}) {
my $bestlink=bestlink($page, $link);
- if (length $bestlink && $bestlink ne $page) {
- $backlinks{$bestlink}{$page}=1;
+ if (length $bestlink) {
+ $backlinks{$bestlink}{$page}=1
+ if $bestlink ne $page;
+ }
+ else {
+ push @{$brokenlinks{$link}}, $page;
}
}
}
- $backlinks_calculated=1;
+ $links_calculated=1;
}
sub backlink_pages ($) {
my $page=shift;
- calculate_backlinks();
+ calculate_links();
return keys %{$backlinks{$page}};
}
@@ -38,7 +43,7 @@ sub backlinks ($) {
my @links;
foreach my $p (backlink_pages($page)) {
my $href=urlto($p, $page);
-
+
# Trim common dir prefixes from both pages.
my $p_trimmed=$p;
my $page_trimmed=$page;
@@ -56,6 +61,10 @@ sub backlinks ($) {
sub genpage ($$) {
my $page=shift;
my $content=shift;
+
+ run_hooks(indexhtml => sub {
+ shift->(page => $page, destpage => $page, content => $content);
+ });
my $templatefile;
run_hooks(templatefile => sub {
@@ -65,17 +74,24 @@ sub genpage ($$) {
$templatefile=$file;
}
});
- my $template=template(defined $templatefile ? $templatefile : 'page.tmpl', blind_cache => 1);
- my $actions=0;
+ my $template;
+ if (defined $templatefile) {
+ $template=template_depends($templatefile, $page,
+ blind_cache => 1);
+ }
+ else {
+ # no explicit depends as special case
+ $template=template('page.tmpl',
+ blind_cache => 1);
+ }
+ my $actions=0;
if (length $config{cgiurl}) {
- $template->param(editurl => cgiurl(do => "edit", page => $page))
- if IkiWiki->can("cgi_editpage");
- $template->param(prefsurl => cgiurl(do => "prefs"))
- if exists $hooks{auth};
- $actions++;
+ if (IkiWiki->can("cgi_editpage")) {
+ $template->param(editurl => cgiurl(do => "edit", page => $page));
+ $actions++;
+ }
}
-
if (defined $config{historyurl} && length $config{historyurl}) {
my $u=$config{historyurl};
$u=~s/\[\[file\]\]/$pagesources{$page}/g;
@@ -83,18 +99,17 @@ sub genpage ($$) {
$actions++;
}
if ($config{discussion}) {
- my $discussionlink=lc(gettext("Discussion"));
- if ($page !~ /.*\/\Q$discussionlink\E$/ &&
+ if ($page !~ /.*\/\Q$config{discussionpage}\E$/i &&
(length $config{cgiurl} ||
- exists $links{$page."/".$discussionlink})) {
- $template->param(discussionlink => htmllink($page, $page, gettext("Discussion"), noimageinline => 1, forcesubpage => 1));
+ exists $links{$page."/".$config{discussionpage}})) {
+ $template->param(discussionlink => htmllink($page, $page, $config{discussionpage}, noimageinline => 1, forcesubpage => 1));
$actions++;
}
}
-
if ($actions) {
$template->param(have_actions => 1);
}
+ templateactions($template, $page);
my @backlinks=sort { $a->{page} cmp $b->{page} } backlinks($page);
my ($backlinks, $more_backlinks);
@@ -116,8 +131,9 @@ sub genpage ($$) {
backlinks => $backlinks,
more_backlinks => $more_backlinks,
mtime => displaytime($pagemtime{$page}),
- ctime => displaytime($pagectime{$page}),
+ ctime => displaytime($pagectime{$page}, undef, 1),
baseurl => baseurl($page),
+ html5 => $config{html5},
);
run_hooks(pagetemplate => sub {
@@ -126,10 +142,6 @@ sub genpage ($$) {
$content=$template->output;
- run_hooks(postscan => sub {
- shift->(page => $page, content => $content);
- });
-
run_hooks(format => sub {
$content=shift->(
page => $page,
@@ -143,6 +155,8 @@ sub genpage ($$) {
sub scan ($) {
my $file=shift;
+ debug(sprintf(gettext("scanning %s"), $file));
+
my $type=pagetype($file);
if (defined $type) {
my $srcfile=srcfile($file);
@@ -153,11 +167,12 @@ sub scan ($) {
if ($config{discussion}) {
# Discussion links are a special case since they're
# not in the text of the page, but on its template.
- $links{$page}=[ $page."/".lc(gettext("Discussion")) ];
+ $links{$page}=[ $page."/".lc($config{discussionpage}) ];
}
else {
$links{$page}=[];
}
+ delete $typedlinks{$page};
run_hooks(scan => sub {
shift->(
@@ -198,14 +213,18 @@ sub fast_file_copy (@) {
}
}
-sub render ($) {
+sub render ($$) {
my $file=shift;
+ return if $rendered{$file};
+ debug(shift);
+ $rendered{$file}=1;
my $type=pagetype($file);
my $srcfile=srcfile($file);
if (defined $type) {
my $page=pagename($file);
delete $depends{$page};
+ delete $depends_simple{$page};
will_render($page, htmlpage($page), 1);
return if $type=~/^_/;
@@ -220,6 +239,7 @@ sub render ($) {
}
else {
delete $depends{$file};
+ delete $depends_simple{$file};
will_render($file, $file, 1);
if ($config{hardlink}) {
@@ -267,79 +287,113 @@ sub srcdir_check () {
}
sub find_src_files () {
- my (@files, %pages);
+ my @files;
+ my %pages;
eval q{use File::Find};
error($@) if $@;
- find({
- no_chdir => 1,
- wanted => sub {
- $_=decode_utf8($_);
- if (file_pruned($_, $config{srcdir})) {
- $File::Find::prune=1;
- }
- elsif (! -l $_ && ! -d _) {
- my ($f)=/$config{wiki_file_regexp}/; # untaint
- if (! defined $f) {
- warn(sprintf(gettext("skipping bad filename %s"), $_)."\n");
- }
- else {
- $f=~s/^\Q$config{srcdir}\E\/?//;
+
+ eval q{use Cwd};
+ die $@ if $@;
+ my $origdir=getcwd();
+ my $abssrcdir=Cwd::abs_path($config{srcdir});
+
+ my ($page, $underlay);
+ my $helper=sub {
+ my $file=decode_utf8($_);
+
+ return if -l $file || -d _;
+ $file=~s/^\.\///;
+ return if ! length $file;
+ $page = pagename($file);
+ if (! exists $pagesources{$page} &&
+ file_pruned($file)) {
+ $File::Find::prune=1;
+ return;
+ }
+
+ my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint
+ if (! defined $f) {
+ warn(sprintf(gettext("skipping bad filename %s"), $file)."\n");
+ return;
+ }
+
+ if ($underlay) {
+ # avoid underlaydir override attacks; see security.mdwn
+ if (! -l "$abssrcdir/$f" && ! -e _) {
+ if (! $pages{$page}) {
push @files, $f;
- my $pagename = pagename($f);
- if ($pages{$pagename}) {
- debug(sprintf(gettext("%s has multiple possible source pages"), $pagename));
- }
- $pages{$pagename}=1;
+ $pages{$page}=1;
}
}
- },
- }, $config{srcdir});
- foreach my $dir (@{$config{underlaydirs}}, $config{underlaydir}) {
- find({
- no_chdir => 1,
- wanted => sub {
- $_=decode_utf8($_);
- if (file_pruned($_, $dir)) {
- $File::Find::prune=1;
- }
- elsif (! -l $_ && ! -d _) {
- my ($f)=/$config{wiki_file_regexp}/; # untaint
- if (! defined $f) {
- warn(sprintf(gettext("skipping bad filename %s"), $_)."\n");
- }
- else {
- $f=~s/^\Q$dir\E\/?//;
- # avoid underlaydir
- # override attacks; see
- # security.mdwn
- if (! -l "$config{srcdir}/$f" &&
- ! -e _) {
- my $page=pagename($f);
- if (! $pages{$page}) {
- push @files, $f;
- $pages{$page}=1;
- }
- }
- }
- }
- },
- }, $dir);
+ }
+ else {
+ push @files, $f;
+ if ($pages{$page}) {
+ debug(sprintf(gettext("%s has multiple possible source pages"), $page));
+ }
+ $pages{$page}=1;
+ }
+ };
+
+ chdir($config{srcdir}) || die "chdir $config{srcdir}: $!";
+ find({
+ no_chdir => 1,
+ wanted => $helper,
+ }, '.');
+ chdir($origdir) || die "chdir $origdir: $!";
+
+ $underlay=1;
+ foreach (@{$config{underlaydirs}}, $config{underlaydir}) {
+ if (chdir($_)) {
+ find({
+ no_chdir => 1,
+ wanted => $helper,
+ }, '.');
+ chdir($origdir) || die "chdir: $!";
+ }
};
- # Returns a list of all source files found, and a hash of
- # the corresponding page names.
return \@files, \%pages;
}
-sub refresh () {
- srcdir_check();
- run_hooks(refresh => sub { shift->() });
- my ($files, $exists)=find_src_files();
+sub find_new_files ($) {
+ my $files=shift;
+ my @new;
+ my @internal_new;
+
+ my $times_noted;
- my (%rendered, @add, @del, @internal);
- # check for added or removed pages
foreach my $file (@$files) {
my $page=pagename($file);
+
+ if ($config{rcs} && $config{gettime} &&
+ -e "$config{srcdir}/$file") {
+ if (! $times_noted) {
+ debug(sprintf(gettext("querying %s for file creation and modification times.."), $config{rcs}));
+ $times_noted=1;
+ }
+
+ eval {
+ my $ctime=rcs_getctime($file);
+ if ($ctime > 0) {
+ $pagectime{$page}=$ctime;
+ }
+ };
+ if ($@) {
+ print STDERR $@;
+ }
+ my $mtime;
+ eval {
+ $mtime=rcs_getmtime($file);
+ };
+ if ($@) {
+ print STDERR $@;
+ }
+ elsif ($mtime > 0) {
+ utime($mtime, $mtime, "$config{srcdir}/$file");
+ }
+ }
+
if (exists $pagesources{$page} && $pagesources{$page} ne $file) {
# the page has changed its type
$forcerebuild{$page}=1;
@@ -347,19 +401,10 @@ sub refresh () {
$pagesources{$page}=$file;
if (! $pagemtime{$page}) {
if (isinternal($page)) {
- push @internal, $file;
+ push @internal_new, $file;
}
else {
- push @add, $file;
- if ($config{getctime} && -e "$config{srcdir}/$file") {
- eval {
- my $time=rcs_getctime("$config{srcdir}/$file");
- $pagectime{$page}=$time;
- };
- if ($@) {
- print STDERR $@;
- }
- }
+ push @new, $file;
}
$pagecase{lc $page}=$page;
if (! exists $pagectime{$page}) {
@@ -367,31 +412,60 @@ sub refresh () {
}
}
}
- foreach my $page (keys %pagemtime) {
- if (! $exists->{$page}) {
+
+ return \@new, \@internal_new;
+}
+
+sub find_del_files ($) {
+ my $pages=shift;
+ my @del;
+ my @internal_del;
+
+ foreach my $page (keys %pagesources) {
+ if (! $pages->{$page}) {
if (isinternal($page)) {
- push @internal, $pagesources{$page};
+ push @internal_del, $pagesources{$page};
}
else {
- debug(sprintf(gettext("removing old page %s"), $page));
push @del, $pagesources{$page};
}
$links{$page}=[];
+ delete $typedlinks{$page};
$renderedfiles{$page}=[];
$pagemtime{$page}=0;
- prune($config{destdir}."/".$_)
- foreach @{$oldrenderedfiles{$page}};
- delete $pagesources{$page};
- foreach (keys %destsources) {
- if ($destsources{$_} eq $page) {
- delete $destsources{$_};
- }
+ }
+ }
+
+ return \@del, \@internal_del;
+}
+
+sub remove_del (@) {
+ foreach my $file (@_) {
+ my $page=pagename($file);
+ if (! isinternal($page)) {
+ debug(sprintf(gettext("removing obsolete %s"), $page));
+ }
+
+ foreach my $old (@{$oldrenderedfiles{$page}}) {
+ prune($config{destdir}."/".$old);
+ }
+
+ foreach my $source (keys %destsources) {
+ if ($destsources{$source} eq $page) {
+ delete $destsources{$source};
}
}
+
+ delete $pagecase{lc $page};
+ $delpagesources{$page}=$pagesources{$page};
+ delete $pagesources{$page};
}
+}
- # find changed and new files
- my @needsbuild;
+sub find_changed ($) {
+ my $files=shift;
+ my @changed;
+ my @internal_changed;
foreach my $file (@$files) {
my $page=pagename($file);
my ($srcfile, @stat)=srcfile_stat($file);
@@ -399,132 +473,356 @@ sub refresh () {
$stat[9] > $pagemtime{$page} ||
$forcerebuild{$page}) {
$pagemtime{$page}=$stat[9];
+
if (isinternal($page)) {
- push @internal, $file;
# Preprocess internal page in scan-only mode.
preprocess($page, $page, readfile($srcfile), 1);
+ push @internal_changed, $file;
}
else {
- push @needsbuild, $file;
+ push @changed, $file;
}
}
}
- run_hooks(needsbuild => sub { shift->(\@needsbuild) });
+ return \@changed, \@internal_changed;
+}
- # scan and render files
- foreach my $file (@needsbuild) {
- debug(sprintf(gettext("scanning %s"), $file));
- scan($file);
- }
- calculate_backlinks();
- foreach my $file (@needsbuild) {
- debug(sprintf(gettext("building %s"), $file));
- render($file);
- $rendered{$file}=1;
- }
- foreach my $file (@internal) {
- # internal pages are not rendered
+sub calculate_old_links ($$) {
+ my ($changed, $del)=@_;
+ my %oldlink_targets;
+ foreach my $file (@$changed, @$del) {
my $page=pagename($file);
- delete $depends{$page};
- foreach my $old (@{$renderedfiles{$page}}) {
- delete $destsources{$old};
+ if (exists $oldlinks{$page}) {
+ foreach my $l (@{$oldlinks{$page}}) {
+ $oldlink_targets{$page}{$l}=bestlink($page, $l);
+ }
}
- $renderedfiles{$page}=[];
}
-
- # rebuild pages that link to added or removed pages
- if (@add || @del) {
- foreach my $f (@add, @del) {
- my $p=pagename($f);
- foreach my $page (keys %{$backlinks{$p}}) {
- my $file=$pagesources{$page};
- next if $rendered{$file};
- debug(sprintf(gettext("building %s, which links to %s"), $file, $p));
- render($file);
- $rendered{$file}=1;
+ return \%oldlink_targets;
+}
+
+sub derender_internal ($) {
+ my $file=shift;
+ my $page=pagename($file);
+ delete $depends{$page};
+ delete $depends_simple{$page};
+ foreach my $old (@{$renderedfiles{$page}}) {
+ delete $destsources{$old};
+ }
+ $renderedfiles{$page}=[];
+}
+
+sub render_linkers ($) {
+ my $f=shift;
+ my $p=pagename($f);
+ foreach my $page (keys %{$backlinks{$p}}) {
+ my $file=$pagesources{$page};
+ render($file, sprintf(gettext("building %s, which links to %s"), $file, $p));
+ }
+}
+
+sub remove_unrendered () {
+ foreach my $src (keys %rendered) {
+ my $page=pagename($src);
+ foreach my $file (@{$oldrenderedfiles{$page}}) {
+ if (! grep { $_ eq $file } @{$renderedfiles{$page}}) {
+ debug(sprintf(gettext("removing %s, no longer built by %s"), $file, $page));
+ prune($config{destdir}."/".$file);
}
}
}
+}
- if (%rendered || @del || @internal) {
- my @changed=(keys %rendered, @del);
+sub link_types_changed ($$) {
+ # each is of the form { type => { link => 1 } }
+ my $new = shift;
+ my $old = shift;
- # rebuild dependant pages
- foreach my $f (@$files) {
- next if $rendered{$f};
- my $p=pagename($f);
- if (exists $depends{$p}) {
- # only consider internal files
- # if the page explicitly depends on such files
- foreach my $file (@changed, $depends{$p}=~/internal\(/ ? @internal : ()) {
- next if $f eq $file;
- my $page=pagename($file);
- if (pagespec_match($page, $depends{$p}, location => $p)) {
- debug(sprintf(gettext("building %s, which depends on %s"), $f, $page));
- render($f);
- $rendered{$f}=1;
- last;
- }
+ return 0 if !defined $new && !defined $old;
+ return 1 if (!defined $new && %$old) || (!defined $old && %$new);
+
+ while (my ($type, $links) = each %$new) {
+ foreach my $link (keys %$links) {
+ return 1 unless exists $old->{$type}{$link};
+ }
+ }
+
+ while (my ($type, $links) = each %$old) {
+ foreach my $link (keys %$links) {
+ return 1 unless exists $new->{$type}{$link};
+ }
+ }
+
+ return 0;
+}
+
+sub calculate_changed_links ($$$) {
+ my ($changed, $del, $oldlink_targets)=@_;
+
+ my (%backlinkchanged, %linkchangers);
+
+ foreach my $file (@$changed, @$del) {
+ my $page=pagename($file);
+
+ if (exists $links{$page}) {
+ foreach my $l (@{$links{$page}}) {
+ my $target=bestlink($page, $l);
+ if (! exists $oldlink_targets->{$page}{$l} ||
+ $target ne $oldlink_targets->{$page}{$l}) {
+ $backlinkchanged{$target}=1;
+ $linkchangers{lc($page)}=1;
}
+ delete $oldlink_targets->{$page}{$l};
}
}
-
- # handle backlinks; if a page has added/removed links,
- # update the pages it links to
- my %linkchanged;
- foreach my $file (@changed) {
- my $page=pagename($file);
-
- if (exists $links{$page}) {
- foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
- if (length $link &&
- (! exists $oldlinks{$page} ||
- ! grep { bestlink($page, $_) eq $link } @{$oldlinks{$page}})) {
- $linkchanged{$link}=1;
- }
+ if (exists $oldlink_targets->{$page} &&
+ %{$oldlink_targets->{$page}}) {
+ foreach my $target (values %{$oldlink_targets->{$page}}) {
+ $backlinkchanged{$target}=1;
+ }
+ $linkchangers{lc($page)}=1;
+ }
+
+ # we currently assume that changing the type of a link doesn't
+ # change backlinks
+ if (!exists $linkchangers{lc($page)}) {
+ if (link_types_changed($typedlinks{$page}, $oldtypedlinks{$page})) {
+ $linkchangers{lc($page)}=1;
+ }
+ }
+ }
+
+ return \%backlinkchanged, \%linkchangers;
+}
+
+sub render_dependent ($$$$$$$) {
+ my ($files, $new, $internal_new, $del, $internal_del,
+ $internal_changed, $linkchangers)=@_;
+
+ my @changed=(keys %rendered, @$del);
+ my @exists_changed=(@$new, @$del);
+
+ my %lc_changed = map { lc(pagename($_)) => 1 } @changed;
+ my %lc_exists_changed = map { lc(pagename($_)) => 1 } @exists_changed;
+
+ foreach my $p ("templates/page.tmpl", keys %{$depends_simple{""}}) {
+ if ($rendered{$p} || grep { $_ eq $p } @$del) {
+ foreach my $f (@$files) {
+ next if $rendered{$f};
+ render($f, sprintf(gettext("building %s, which depends on %s"), $f, $p));
+ }
+ return 0;
+ }
+ }
+
+ foreach my $f (@$files) {
+ next if $rendered{$f};
+ my $p=pagename($f);
+ my $reason = undef;
+
+ if (exists $depends_simple{$p} && ! defined $reason) {
+ foreach my $d (keys %{$depends_simple{$p}}) {
+ if (($depends_simple{$p}{$d} & $IkiWiki::DEPEND_CONTENT &&
+ $lc_changed{$d})
+ ||
+ ($depends_simple{$p}{$d} & $IkiWiki::DEPEND_PRESENCE &&
+ $lc_exists_changed{$d})
+ ||
+ ($depends_simple{$p}{$d} & $IkiWiki::DEPEND_LINKS &&
+ $linkchangers->{$d})
+ ) {
+ $reason = $d;
+ last;
}
}
- if (exists $oldlinks{$page}) {
- foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
- if (length $link &&
- (! exists $links{$page} ||
- ! grep { bestlink($page, $_) eq $link } @{$links{$page}})) {
- $linkchanged{$link}=1;
+ }
+
+ if (exists $depends{$p} && ! defined $reason) {
+ foreach my $dep (keys %{$depends{$p}}) {
+ my $sub=pagespec_translate($dep);
+ next unless defined $sub;
+
+ # only consider internal files
+ # if the page explicitly depends
+ # on such files
+ my $internal_dep=$dep =~ /(?:internal|comment|comment_pending)\(/;
+
+ my $in=sub {
+ my $list=shift;
+ my $type=shift;
+ foreach my $file (@$list) {
+ next if $file eq $f;
+ my $page=pagename($file);
+ if ($sub->($page, location => $p)) {
+ if ($type == $IkiWiki::DEPEND_LINKS) {
+ next unless $linkchangers->{lc($page)};
+ }
+ $reason=$page;
+ return 1;
+ }
}
+ return undef;
+ };
+
+ if ($depends{$p}{$dep} & $IkiWiki::DEPEND_CONTENT) {
+ last if $in->(\@changed, $IkiWiki::DEPEND_CONTENT);
+ last if $internal_dep && (
+ $in->($internal_new, $IkiWiki::DEPEND_CONTENT) ||
+ $in->($internal_del, $IkiWiki::DEPEND_CONTENT) ||
+ $in->($internal_changed, $IkiWiki::DEPEND_CONTENT)
+ );
+ }
+ if ($depends{$p}{$dep} & $IkiWiki::DEPEND_PRESENCE) {
+ last if $in->(\@exists_changed, $IkiWiki::DEPEND_PRESENCE);
+ last if $internal_dep && (
+ $in->($internal_new, $IkiWiki::DEPEND_PRESENCE) ||
+ $in->($internal_del, $IkiWiki::DEPEND_PRESENCE)
+ );
+ }
+ if ($depends{$p}{$dep} & $IkiWiki::DEPEND_LINKS) {
+ last if $in->(\@changed, $IkiWiki::DEPEND_LINKS);
+ last if $internal_dep && (
+ $in->($internal_new, $IkiWiki::DEPEND_LINKS) ||
+ $in->($internal_del, $IkiWiki::DEPEND_LINKS) ||
+ $in->($internal_changed, $IkiWiki::DEPEND_LINKS)
+ );
}
}
}
+
+ if (defined $reason) {
+ render($f, sprintf(gettext("building %s, which depends on %s"), $f, $reason));
+ return 1;
+ }
+ }
- foreach my $link (keys %linkchanged) {
- my $linkfile=$pagesources{$link};
- if (defined $linkfile) {
- next if $rendered{$linkfile};
- debug(sprintf(gettext("building %s, to update its backlinks"), $linkfile));
- render($linkfile);
- $rendered{$linkfile}=1;
- }
+ return 0;
+}
+
+sub render_backlinks ($) {
+ my $backlinkchanged=shift;
+ foreach my $link (keys %$backlinkchanged) {
+ my $linkfile=$pagesources{$link};
+ if (defined $linkfile) {
+ render($linkfile, sprintf(gettext("building %s, to update its backlinks"), $linkfile));
}
}
+}
- # remove no longer rendered files
- foreach my $src (keys %rendered) {
- my $page=pagename($src);
- foreach my $file (@{$oldrenderedfiles{$page}}) {
- if (! grep { $_ eq $file } @{$renderedfiles{$page}}) {
- debug(sprintf(gettext("removing %s, no longer built by %s"), $file, $page));
- prune($config{destdir}."/".$file);
- }
+sub gen_autofile ($$$) {
+ my $autofile=shift;
+ my $pages=shift;
+ my $del=shift;
+
+ if (file_pruned($autofile)) {
+ return;
+ }
+
+ my ($file)="$config{srcdir}/$autofile" =~ /$config{wiki_file_regexp}/; # untaint
+ if (! defined $file) {
+ return;
+ }
+
+ # Remember autofiles that were tried, and never try them again later.
+ if (exists $wikistate{$autofiles{$autofile}{plugin}}{autofile}{$autofile}) {
+ return;
+ }
+ $wikistate{$autofiles{$autofile}{plugin}}{autofile}{$autofile}=1;
+
+ if (srcfile($autofile, 1) || file_pruned($autofile)) {
+ return;
+ }
+
+ if (-l $file || -d _ || -e _) {
+ return;
+ }
+
+ my $page = pagename($file);
+ if ($pages->{$page}) {
+ return;
+ }
+
+ if (grep { $_ eq $autofile } @$del) {
+ return;
+ }
+
+ $autofiles{$autofile}{generator}->();
+ $pages->{$page}=1;
+ return 1;
+}
+
+
+sub refresh () {
+ srcdir_check();
+ run_hooks(refresh => sub { shift->() });
+ my ($files, $pages)=find_src_files();
+ my ($new, $internal_new)=find_new_files($files);
+ my ($del, $internal_del)=find_del_files($pages);
+ my ($changed, $internal_changed)=find_changed($files);
+ run_hooks(needsbuild => sub { shift->($changed) });
+ my $oldlink_targets=calculate_old_links($changed, $del);
+
+ foreach my $file (@$changed) {
+ scan($file);
+ }
+
+ foreach my $autofile (keys %autofiles) {
+ if (gen_autofile($autofile, $pages, $del)) {
+ push @{$files}, $autofile;
+ push @{$new}, $autofile if find_new_files([$autofile]);
+ push @{$changed}, $autofile if find_changed([$autofile]);
+
+ scan($autofile);
}
}
- if (@del) {
- run_hooks(delete => sub { shift->(@del) });
+ calculate_links();
+
+ remove_del(@$del, @$internal_del);
+
+ foreach my $file (@$changed) {
+ render($file, sprintf(gettext("building %s"), $file));
+ }
+ foreach my $file (@$internal_new, @$internal_del, @$internal_changed) {
+ derender_internal($file);
+ }
+
+ my ($backlinkchanged, $linkchangers)=calculate_changed_links($changed,
+ $del, $oldlink_targets);
+
+ foreach my $file (@$new, @$del) {
+ render_linkers($file);
+ }
+
+ if (@$changed || @$internal_changed ||
+ @$del || @$internal_del || @$internal_new) {
+ 1 while render_dependent($files, $new, $internal_new,
+ $del, $internal_del, $internal_changed,
+ $linkchangers);
+ }
+
+ render_backlinks($backlinkchanged);
+ remove_unrendered();
+
+ if (@$del || @$internal_del) {
+ run_hooks(delete => sub { shift->(@$del, @$internal_del) });
}
if (%rendered) {
run_hooks(change => sub { shift->(keys %rendered) });
}
}
+sub clean_rendered {
+ lockwiki();
+ loadindex();
+ remove_unrendered();
+ foreach my $page (keys %oldrenderedfiles) {
+ foreach my $file (@{$oldrenderedfiles{$page}}) {
+ prune($config{destdir}."/".$file);
+ }
+ }
+}
+
sub commandline_render () {
lockwiki();
loadindex();
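
The render_dependent() logic introduced above keys off per-pagespec bitmasks ($IkiWiki::DEPEND_CONTENT, $IkiWiki::DEPEND_PRESENCE, $IkiWiki::DEPEND_LINKS) to decide which pages to rebuild. A minimal, self-contained sketch of that idea, not part of the patch, with made-up flag values and a crude glob match standing in for pagespec_translate():

#!/usr/bin/perl
use strict;
use warnings;

# Illustrative flag values; ikiwiki defines its own constants.
my $DEPEND_CONTENT  = 1;
my $DEPEND_PRESENCE = 2;
my $DEPEND_LINKS    = 4;

# page => { pagespec => bitmask of dependency types }
my %deps = (
	index => { "posts/*" => $DEPEND_CONTENT | $DEPEND_LINKS },
);

sub needs_rebuild {
	my ($page, $changed_page, $change_type) = @_;
	foreach my $spec (keys %{$deps{$page}}) {
		# skip specs that do not care about this kind of change
		next unless $deps{$page}{$spec} & $change_type;
		# crude stand-in for real pagespec matching
		(my $re = $spec) =~ s/\*/.*/g;
		return 1 if $changed_page =~ /^$re$/;
	}
	return 0;
}

print needs_rebuild("index", "posts/hello", $DEPEND_CONTENT), "\n";   # 1
print needs_rebuild("index", "posts/hello", $DEPEND_PRESENCE), "\n";  # 0

The real code additionally loops "1 while render_dependent(...)" so that rebuilding one page can trigger further dependent rebuilds until a fixed point is reached.
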
diff --git a/IkiWiki/Setup.pm b/IkiWiki/Setup.pm
index 6ee112096..48f3d4634 100644
--- a/IkiWiki/Setup.pm
+++ b/IkiWiki/Setup.pm
@@ -1,6 +1,8 @@
#!/usr/bin/perl
-# Ikiwiki setup files are perl files that 'use IkiWiki::Setup::foo',
-# passing it some sort of configuration data.
+# Ikiwiki setup files can be perl files that 'use IkiWiki::Setup::foo',
+# passing it some sort of configuration data. Or, they can contain
+# the module name at the top, without the 'use', and the whole file is
+# then fed into that module.
package IkiWiki::Setup;
@@ -10,23 +12,72 @@ use IkiWiki;
use open qw{:utf8 :std};
use File::Spec;
-sub load ($) {
- my $setup=IkiWiki::possibly_foolish_untaint(shift);
- $config{setupfile}=File::Spec->rel2abs($setup);
+sub load ($;$) {
+ my $file=IkiWiki::possibly_foolish_untaint(shift);
+ my $safemode=shift;
+
+ $config{setupfile}=File::Spec->rel2abs($file);
#translators: The first parameter is a filename, and the second
#translators: is a (probably not translated) error message.
- open (IN, $setup) || error(sprintf(gettext("cannot read %s: %s"), $setup, $!));
- my $code;
+ open (IN, $file) || error(sprintf(gettext("cannot read %s: %s"), $file, $!));
+ my $content;
{
local $/=undef;
- $code=<IN>;
+ $content=<IN> || error("$file: $!");
}
- ($code)=$code=~/(.*)/s;
close IN;
- eval $code;
- error("$setup: ".$@) if $@;
+ if ($content=~/((?:use|require)\s+)?IkiWiki::Setup::(\w+)/) {
+ $config{setuptype}=$2;
+ if ($1) {
+ error sprintf(gettext("cannot load %s in safe mode"), $file)
+ if $safemode;
+ no warnings;
+ eval IkiWiki::possibly_foolish_untaint($content);
+ error("$file: ".$@) if $@;
+ }
+ else {
+ eval qq{require IkiWiki::Setup::$config{setuptype}};
+ error $@ if $@;
+ "IkiWiki::Setup::$config{setuptype}"->loaddump(IkiWiki::possibly_foolish_untaint($content));
+ }
+ }
+ else {
+ error sprintf(gettext("failed to parse %s"), $file);
+ }
+}
+
+sub dump ($) {
+ my $file=IkiWiki::possibly_foolish_untaint(shift);
+
+ my @header=(
+ "Setup file for ikiwiki.",
+ "",
+ "Passing this to ikiwiki --setup will make ikiwiki generate",
+ "wrappers and build the wiki.",
+ "",
+ "Remember to re-run ikiwiki --setup any time you edit this file.",
+ );
+
+ # Fork because dumping setup requires loading all plugins.
+ my $pid=fork();
+ if ($pid == 0) {
+ eval qq{require IkiWiki::Setup::$config{setuptype}};
+ error $@ if $@;
+ my @dump="IkiWiki::Setup::$config{setuptype}"->gendump(@header);
+
+ open (OUT, ">", $file) || die "$file: $!";
+ print OUT "$_\n" foreach @dump;
+ close OUT;
+
+ exit 0;
+ }
+ else {
+ waitpid $pid, 0;
+ exit($? >> 8) if $? >> 8;
+ exit(1) if $?;
+ }
}
sub merge ($) {
@@ -73,10 +124,30 @@ sub merge ($) {
}
}
+sub disabled_plugins (@) {
+ # Handles running disable hooks of plugins that were enabled
+ # previously, but got disabled when a new setup file was loaded.
+ if (exists $config{setupfile} && @_) {
+ # Fork a child to load the disabled plugins.
+ my $pid=fork();
+ if ($pid == 0) {
+ foreach my $plugin (@_) {
+ eval { IkiWiki::loadplugin($plugin, 1) };
+ if (exists $IkiWiki::hooks{disable}{$plugin}{call}) {
+ eval { $IkiWiki::hooks{disable}{$plugin}{call}->() };
+ }
+ }
+ exit(0);
+ }
+ else {
+ waitpid $pid, 0;
+ }
+ }
+}
+
sub getsetup () {
# Gets all available setup data from all plugins. Returns an
# ordered list of [plugin, setup] pairs.
- my @ret;
# disable logging to syslog while dumping, broken plugins may
# whine when loaded
@@ -84,38 +155,118 @@ sub getsetup () {
$config{syslog}=undef;
# Load all plugins, so that all setup options are available.
- my @plugins=grep { $_ ne $config{rcs} } sort(IkiWiki::listplugins());
- unshift @plugins, $config{rcs} if $config{rcs}; # rcs plugin 1st
+ my %original_loaded_plugins=%IkiWiki::loaded_plugins;
+ my @plugins=IkiWiki::listplugins();
foreach my $plugin (@plugins) {
- eval { IkiWiki::loadplugin($plugin) };
+ eval { IkiWiki::loadplugin($plugin, 1) };
if (exists $IkiWiki::hooks{checkconfig}{$plugin}{call}) {
my @s=eval { $IkiWiki::hooks{checkconfig}{$plugin}{call}->() };
}
}
-
+ %IkiWiki::loaded_plugins=%original_loaded_plugins;
+
+ my %sections;
foreach my $plugin (@plugins) {
if (exists $IkiWiki::hooks{getsetup}{$plugin}{call}) {
# use an array rather than a hash, to preserve order
my @s=eval { $IkiWiki::hooks{getsetup}{$plugin}{call}->() };
next unless @s;
- push @ret, [ $plugin, \@s ],
+
+ # set default section value (note use of shared
+ # hashref between array and hash)
+ my %s=@s;
+ if (! exists $s{plugin} || ! $s{plugin}->{section}) {
+ $s{plugin}->{section}="other";
+ }
+
+ # only the selected rcs plugin is included
+ if ($config{rcs} && $plugin eq $config{rcs}) {
+ $s{plugin}->{section}="core";
+ }
+ elsif ($s{plugin}->{section} eq "rcs") {
+ next;
+ }
+
+ push @{$sections{$s{plugin}->{section}}}, [ $plugin, \@s ];
}
}
$config{syslog}=$syslog;
- return @ret;
+ return map { sort { $a->[0] cmp $b->[0] } @{$sections{$_}} }
+ sort { # core first, other last, otherwise alphabetical
+ ($b eq "core") <=> ($a eq "core")
+ ||
+ ($a eq "other") <=> ($b eq "other")
+ ||
+ $a cmp $b
+ } keys %sections;
}
-sub dump ($) {
- my $file=IkiWiki::possibly_foolish_untaint(shift);
+sub commented_dump ($$) {
+ my $dumpline=shift;
+ my $indent=shift;
+
+ my %setup=(%config);
+ my @ret;
- require IkiWiki::Setup::Standard;
- my @dump=IkiWiki::Setup::Standard::gendump("Setup file for ikiwiki.");
+ # disable logging to syslog while dumping
+ $config{syslog}=undef;
+
+ eval q{use Text::Wrap};
+ die $@ if $@;
+
+ my %section_plugins;
+ push @ret, commented_dumpvalues($dumpline, $indent, \%setup, IkiWiki::getsetup());
+ foreach my $pair (IkiWiki::Setup::getsetup()) {
+ my $plugin=$pair->[0];
+ my $setup=$pair->[1];
+ my %s=@{$setup};
+ my $section=$s{plugin}->{section};
+ push @{$section_plugins{$section}}, $plugin;
+ if (@{$section_plugins{$section}} == 1) {
+ push @ret, "", $indent.("#" x 70), "$indent# $section plugins",
+ sub {
+ wrap("$indent# (", "$indent# ",
+ join(", ", @{$section_plugins{$section}})).")"
+ },
+ $indent.("#" x 70);
+ }
- open (OUT, ">", $file) || die "$file: $!";
- print OUT "$_\n" foreach @dump;
- close OUT;
+ my @values=commented_dumpvalues($dumpline, $indent, \%setup, @{$setup});
+ if (@values) {
+ push @ret, "", "$indent# $plugin plugin", @values;
+ }
+ }
+
+ return map { ref $_ ? $_->() : $_ } @ret;
+}
+
+sub commented_dumpvalues ($$$@) {
+ my $dumpline=shift;
+ my $indent=shift;
+ my $setup=shift;
+ my @ret;
+ while (@_) {
+ my $key=shift;
+ my %info=%{shift()};
+
+ next if $key eq "plugin" || $info{type} eq "internal";
+
+ push @ret, "$indent# ".$info{description} if exists $info{description};
+
+ if (exists $setup->{$key} && defined $setup->{$key}) {
+ push @ret, $dumpline->($key, $setup->{$key}, $info{type}, "");
+ delete $setup->{$key};
+ }
+ elsif (exists $info{example}) {
+ push @ret, $dumpline->($key, $info{example}, $info{type}, "#");
+ }
+ else {
+ push @ret, $dumpline->($key, "", $info{type}, "#");
+ }
+ }
+ return @ret;
}
1
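
The reworked load() above accepts two file formats: a Perl setup file that says 'use IkiWiki::Setup::Standard { ... }' (eval'd, and refused in safe mode), or a file that merely names a module such as IkiWiki::Setup::Yaml near the top, in which case the whole file is handed to that module's loaddump(). A standalone sketch of just the detection step, not part of the patch, run against a trimmed example file:

#!/usr/bin/perl
use strict;
use warnings;

my $content = <<'EOF';
#!/usr/bin/perl
use IkiWiki::Setup::Standard {
	wikiname => "MyWiki",
	srcdir => "/home/me/wiki",
};
EOF

if ($content =~ /((?:use|require)\s+)?IkiWiki::Setup::(\w+)/) {
	my ($is_perl, $module) = (defined $1, $2);
	if ($is_perl) {
		print "perl-style setup file; would be eval'd (refused in safe mode)\n";
	}
	else {
		print "data-style setup file; would call IkiWiki::Setup::${module}->loaddump()\n";
	}
}
else {
	die "failed to parse setup file\n";
}
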
diff --git a/IkiWiki/Setup/Automator.pm b/IkiWiki/Setup/Automator.pm
index 742d67666..2dcb424e5 100644
--- a/IkiWiki/Setup/Automator.pm
+++ b/IkiWiki/Setup/Automator.pm
@@ -15,6 +15,7 @@ sub ask ($$) {
my ($question, $default)=@_;
my $r=Term::ReadLine->new("ikiwiki");
+ $r->ornaments("md,me");
$r->readline(encode_utf8($question)." ", $default);
}
@@ -24,27 +25,35 @@ sub prettydir ($) {
return $dir;
}
-sub import (@) {
- my $this=shift;
- IkiWiki::Setup::merge({@_});
+sub sanitize_wikiname ($) {
+ my $wikiname=shift;
# Sanitize this to avoid problematic directory names.
- $config{wikiname}=~s/[^-A-Za-z0-9_]//g;
- if (! length $config{wikiname}) {
+ $wikiname=~s/[^-A-Za-z0-9_]//g;
+ if (! length $wikiname) {
error gettext("you must enter a wikiname (that contains alphanumerics)");
}
+ return $wikiname;
+}
- # Avoid overwriting any existing files.
- foreach my $key (qw{srcdir destdir repository dumpsetup}) {
- next unless exists $config{$key};
- my $add="";
- my $dir=IkiWiki::dirname($config{$key})."/";
- my $base=IkiWiki::basename($config{$key});
- while (-e $dir.$add.$base) {
- $add=1 if ! $add;
- $add++;
+sub import (@) {
+ my $this=shift;
+ $config{setuptype}='Standard';
+ IkiWiki::Setup::merge({@_});
+
+ if (! $config{force_overwrite}) {
+ # Avoid overwriting any existing files.
+ foreach my $key (qw{srcdir destdir repository dumpsetup}) {
+ next unless exists $config{$key};
+ my $add="";
+ my $dir=IkiWiki::dirname($config{$key})."/";
+ my $base=IkiWiki::basename($config{$key});
+ while (-e $dir.$add.$base) {
+ $add=1 if ! $add;
+ $add++;
+ }
+ $config{$key}=$dir.$add.$base;
}
- $config{$key}=$dir.$add.$base;
}
# Set up wrapper
@@ -63,9 +72,18 @@ sub import (@) {
}
elsif ($config{rcs} eq 'bzr') {
# TODO
+ print STDERR "warning: do not know how to set up the bzr_wrapper hook!\n";
}
elsif ($config{rcs} eq 'mercurial') {
# TODO
+ print STDERR "warning: do not know how to set up the mercurial_wrapper hook!\n";
+ }
+ elsif ($config{rcs} eq 'tla') {
+ # TODO
+ print STDERR "warning: do not know how to set up the tla_wrapper hook!\n";
+ }
+ elsif ($config{rcs} eq 'cvs') {
+ $config{cvs_wrapper}=$config{repository}."/CVSROOT/post-commit";
}
else {
error sprintf(gettext("unsupported revision control system %s"),
@@ -112,9 +130,10 @@ sub import (@) {
IkiWiki::run_hooks(checkconfig => sub { shift->() });
};
if ($@) {
+ my $err=$@;
print STDERR sprintf(gettext("** Disabling plugin %s, since it is failing with this message:"),
$plugin)."\n";
- print STDERR "$@\n";
+ print STDERR "$err\n";
push @{$bakconfig{disable_plugins}}, $plugin;
}
}
@@ -133,7 +152,7 @@ sub import (@) {
# Create admin user(s).
foreach my $admin (@{$config{adminuser}}) {
- next if $admin=~/^http\?:\/\//; # openid
+ next if defined IkiWiki::openiduser($admin);
# Prompt for password w/o echo.
my ($password, $password2);
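
The force_overwrite guard above wraps the existing collision-avoidance loop, which prefixes an incrementing number onto the basename until the path is unused. The same loop as a standalone sketch, not part of the patch, with a hypothetical next_free_path() helper:

#!/usr/bin/perl
use strict;
use warnings;
use File::Basename qw(dirname basename);

sub next_free_path {
	my $path = shift;
	my $dir  = dirname($path)."/";
	my $base = basename($path);
	my $add  = "";
	while (-e $dir.$add.$base) {
		$add = 1 if ! $add;
		$add++;              # tries "2<base>", then "3<base>", ...
	}
	return $dir.$add.$base;
}

print next_free_path("/tmp/ikiwiki-srcdir"), "\n";
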
diff --git a/IkiWiki/Setup/Standard.pm b/IkiWiki/Setup/Standard.pm
index 951bcfc56..c85069304 100644
--- a/IkiWiki/Setup/Standard.pm
+++ b/IkiWiki/Setup/Standard.pm
@@ -1,7 +1,4 @@
#!/usr/bin/perl
-# Standard ikiwiki setup module.
-# Parameters to import should be all the standard ikiwiki config stuff,
-# plus an array of wrappers to set up.
package IkiWiki::Setup::Standard;
@@ -9,10 +6,22 @@ use warnings;
use strict;
use IkiWiki;
+# Parameters to import should be all the standard ikiwiki config, in a hash.
sub import {
IkiWiki::Setup::merge($_[1]);
}
+sub gendump ($@) {
+ my $class=shift;
+
+ "#!/usr/bin/perl",
+ "#",
+ (map { "# $_" } @_),
+ "use IkiWiki::Setup::Standard {",
+ IkiWiki::Setup::commented_dump(\&dumpline, "\t"),
+ "}"
+}
+
sub dumpline ($$$$) {
my $key=shift;
my $value=shift;
@@ -57,61 +66,4 @@ sub dumpline ($$$$) {
return "\t$prefix$key => $dumpedvalue,";
}
-sub dumpvalues ($@) {
- my $setup=shift;
- my @ret;
- while (@_) {
- my $key=shift;
- my %info=%{shift()};
-
- next if $key eq "plugin" || $info{type} eq "internal";
-
- push @ret, "\t# ".$info{description} if exists $info{description};
-
- if (exists $setup->{$key} && defined $setup->{$key}) {
- push @ret, dumpline($key, $setup->{$key}, $info{type}, "");
- delete $setup->{$key};
- }
- elsif (exists $info{example}) {
- push @ret, dumpline($key, $info{example}, $info{type}, "#");
- }
- else {
- push @ret, dumpline($key, "", $info{type}, "#");
- }
- }
- return @ret;
-}
-
-sub gendump ($) {
- my $description=shift;
- my %setup=(%config);
- my @ret;
-
- # disable logging to syslog while dumping
- $config{syslog}=undef;
-
- push @ret, dumpvalues(\%setup, IkiWiki::getsetup());
- foreach my $pair (IkiWiki::Setup::getsetup()) {
- my $plugin=$pair->[0];
- my $setup=$pair->[1];
- my @values=dumpvalues(\%setup, @{$setup});
- if (@values) {
- push @ret, "", "\t# $plugin plugin", @values;
- }
- }
-
- unshift @ret,
- "#!/usr/bin/perl",
- "# $description",
- "#",
- "# Passing this to ikiwiki --setup will make ikiwiki generate",
- "# wrappers and build the wiki.",
- "#",
- "# Remember to re-run ikiwiki --setup any time you edit this file.",
- "use IkiWiki::Setup::Standard {";
- push @ret, "}";
-
- return @ret;
-}
-
1
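
For reference, a heavily trimmed example of the kind of file the Standard-format gendump() above produces; the header comment lines come from IkiWiki::Setup::dump(), the section banners from commented_dump(), and all option names, descriptions and values here are illustrative rather than taken from the patch:

#!/usr/bin/perl
#
# Setup file for ikiwiki.
#
# Passing this to ikiwiki --setup will make ikiwiki generate
# wrappers and build the wiki.
#
# Remember to re-run ikiwiki --setup any time you edit this file.
use IkiWiki::Setup::Standard {
	# name of the wiki
	wikiname => "MyWiki",
	# users who are wiki admins
	#adminuser => [],

	######################################################################
	# core plugins
	# (editpage, htmlscrubber, inline)
	######################################################################

	# editpage plugin
	# ... options dumped by each plugin's getsetup hook go here ...
}
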
diff --git a/IkiWiki/Setup/Yaml.pm b/IkiWiki/Setup/Yaml.pm
new file mode 100644
index 000000000..904784728
--- /dev/null
+++ b/IkiWiki/Setup/Yaml.pm
@@ -0,0 +1,50 @@
+#!/usr/bin/perl
+
+package IkiWiki::Setup::Yaml;
+
+use warnings;
+use strict;
+use IkiWiki;
+
+sub loaddump ($$) {
+ my $class=shift;
+ my $content=shift;
+
+ eval q{use YAML::Any};
+ eval q{use YAML} if $@;
+ die $@ if $@;
+ $YAML::Syck::ImplicitUnicode=1;
+ IkiWiki::Setup::merge(Load($content));
+}
+
+sub gendump ($@) {
+ my $class=shift;
+
+ "# IkiWiki::Setup::Yaml - YAML formatted setup file",
+ "#",
+ (map { "# $_" } @_),
+ "#",
+ IkiWiki::Setup::commented_dump(\&dumpline, "")
+}
+
+
+sub dumpline ($$$$) {
+ my $key=shift;
+ my $value=shift;
+ my $type=shift;
+ my $prefix=shift;
+
+ eval q{use YAML::Old};
+ eval q{use YAML} if $@;
+ die $@ if $@;
+ $YAML::UseHeader=0;
+
+ my $dump=Dump({$key => $value});
+ chomp $dump;
+ if (length $prefix) {
+ $dump=join("\n", map { $prefix.$_ } split(/\n/, $dump));
+ }
+ return $dump;
+}
+
+1
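
In rough terms, loaddump() above just feeds the file through YAML's Load() and merges the resulting hash into %config. A self-contained sketch of that parsing step, not part of the patch, with an illustrative document (requires YAML::Any or YAML):

#!/usr/bin/perl
use strict;
use warnings;

eval q{use YAML::Any};
eval q{use YAML} if $@;
die $@ if $@;

my $content = <<'EOF';
# IkiWiki::Setup::Yaml - YAML formatted setup file
wikiname: MyWiki
srcdir: /home/me/wiki
destdir: /var/www/wiki
EOF

my $setup = Load($content);    # this hash is what would go to IkiWiki::Setup::merge()
print "$_ => $setup->{$_}\n" foreach sort keys %$setup;
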
diff --git a/IkiWiki/Wrapper.pm b/IkiWiki/Wrapper.pm
index 6555fe625..927368fae 100644
--- a/IkiWiki/Wrapper.pm
+++ b/IkiWiki/Wrapper.pm
@@ -8,6 +8,26 @@ use File::Spec;
use Data::Dumper;
use IkiWiki;
+sub gen_wrappers () {
+ debug(gettext("generating wrappers.."));
+ my %origconfig=(%config);
+ foreach my $wrapper (@{$config{wrappers}}) {
+ %config=(%origconfig, %{$wrapper});
+ $config{verbose}=$config{setupverbose}
+ if exists $config{setupverbose};
+ $config{syslog}=$config{setupsyslog}
+ if exists $config{setupsyslog};
+ delete @config{qw(setupsyslog setupverbose wrappers genwrappers rebuild)};
+ checkconfig();
+ if (! $config{cgi} && ! $config{post_commit} &&
+ ! $config{test_receive}) {
+ $config{post_commit}=1;
+ }
+ gen_wrapper();
+ }
+ %config=(%origconfig);
+}
+
sub gen_wrapper () {
$config{srcdir}=File::Spec->rel2abs($config{srcdir});
$config{destdir}=File::Spec->rel2abs($config{destdir});
@@ -37,12 +57,9 @@ sub gen_wrapper () {
addenv("$var", s);
EOF
}
-
- my $test_receive="";
- if ($config{test_receive}) {
- require IkiWiki::Receive;
- $test_receive=IkiWiki::Receive::gen_wrapper();
- }
+
+ my @wrapper_hooks;
+ run_hooks(genwrapper => sub { push @wrapper_hooks, shift->() });
my $check_commit_hook="";
my $pre_exec="";
@@ -76,17 +93,23 @@ EOF
# otherwise. The fd of the lock is stored in
# IKIWIKI_CGILOCK_FD so unlockwiki can close it.
$pre_exec=<<"EOF";
- {
- int fd=open("$config{wikistatedir}/cgilock", O_CREAT | O_RDWR, 0666);
- if (fd != -1 && flock(fd, LOCK_EX) == 0) {
- char *fd_s;
- asprintf(&fd_s, "%i", fd);
- setenv("IKIWIKI_CGILOCK_FD", fd_s, 1);
- }
+ lockfd=open("$config{wikistatedir}/cgilock", O_CREAT | O_RDWR, 0666);
+ if (lockfd != -1 && flock(lockfd, LOCK_EX) == 0) {
+ char *fd_s=malloc(8);
+ sprintf(fd_s, "%i", lockfd);
+ setenv("IKIWIKI_CGILOCK_FD", fd_s, 1);
}
EOF
}
+ my $set_background_command='';
+ if (defined $config{wrapper_background_command} &&
+ length $config{wrapper_background_command}) {
+ my $background_command=delete $config{wrapper_background_command};
+ $background_command=~s/"/\\"/g;
+ $set_background_command='#define BACKGROUND_COMMAND "'.$background_command.'"';
+ }
+
$Data::Dumper::Indent=0; # no newlines
my $configstring=Data::Dumper->Dump([\%config], ['*config']);
$configstring=~s/\\/\\\\/g;
@@ -108,7 +131,7 @@ extern char **environ;
char *newenviron[$#envsave+6];
int i=0;
-addenv(char *var, char *val) {
+void addenv(char *var, char *val) {
char *s=malloc(strlen(var)+1+strlen(val)+1);
if (!s)
perror("malloc");
@@ -117,15 +140,27 @@ addenv(char *var, char *val) {
}
int main (int argc, char **argv) {
+ int lockfd=-1;
char *s;
$check_commit_hook
-$test_receive
+@wrapper_hooks
$envsave
newenviron[i++]="HOME=$ENV{HOME}";
newenviron[i++]="WRAPPED_OPTIONS=$configstring";
+
+#ifdef __TINYC__
+ /* old tcc versions do not support modifying environ directly */
+ if (clearenv() != 0) {
+ perror("clearenv");
+ exit(1);
+ }
+ for (; i>0; i--)
+ putenv(newenviron[i-1]);
+#else
newenviron[i]=NULL;
environ=newenviron;
+#endif
if (setregid(getegid(), -1) != 0 &&
setregid(getegid(), -1) != 0) {
@@ -139,15 +174,46 @@ $envsave
}
$pre_exec
+
+$set_background_command
+#ifdef BACKGROUND_COMMAND
+ if (lockfd != -1) {
+ close(lockfd);
+ }
+
+ pid_t pid=fork();
+ if (pid == -1) {
+ perror("fork");
+ exit(1);
+ }
+ else if (pid == 0) {
+ execl("$this", "$this", NULL);
+ perror("exec $this");
+ exit(1);
+ }
+ else {
+ waitpid(pid, NULL, 0);
+
+ if (daemon(1, 0) == 0) {
+ system(BACKGROUND_COMMAND);
+ exit(0);
+ }
+ else {
+ perror("daemon");
+ exit(1);
+ }
+ }
+#else
execl("$this", "$this", NULL);
perror("exec $this");
exit(1);
+#endif
}
EOF
- close OUT;
- my $cc=exists $ENV{CC} ? possibly_foolish_untaint($ENV{CC}) : 'cc';
- if (system($cc, "$wrapper.c", "-o", "$wrapper.new") != 0) {
+ my @cc=exists $ENV{CC} ? possibly_foolish_untaint($ENV{CC}) : 'cc';
+ push @cc, possibly_foolish_untaint($ENV{CFLAGS}) if exists $ENV{CFLAGS};
+ if (system(@cc, "$wrapper.c", "-o", "$wrapper.new") != 0) {
#translators: The parameter is a C filename.
error(sprintf(gettext("failed to compile %s"), "$wrapper.c"));
}
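
The BACKGROUND_COMMAND branch added to the generated C above runs the wrapped ikiwiki first, waits for it, then detaches and runs wrapper_background_command so that slow follow-up work does not hold up the caller. The same control flow sketched in Perl, not part of the patch, with placeholder commands:

#!/usr/bin/perl
use strict;
use warnings;
use POSIX qw(setsid);

my $wrapped            = "/bin/true";   # stands in for the wrapped ikiwiki binary
my $background_command = "sleep 1";     # stands in for wrapper_background_command

# run the wrapped program in the foreground and wait for it
my $pid = fork();
die "fork: $!" unless defined $pid;
if ($pid == 0) {
	exec $wrapped or die "exec $wrapped: $!";
}
waitpid($pid, 0);

# then detach (rough equivalent of daemon(1, 0)) and run the slow part
my $bg = fork();
die "fork: $!" unless defined $bg;
if ($bg == 0) {
	setsid();
	open STDIN,  "<", "/dev/null";
	open STDOUT, ">", "/dev/null";
	open STDERR, ">", "/dev/null";
	system($background_command);
	exit 0;
}
exit 0;

Separately, the compile step at the end of the hunk now respects the CC and CFLAGS environment variables instead of always invoking plain cc.
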