summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--IkiWiki/Plugin/inline.pm7
-rw-r--r--IkiWiki/Plugin/meta.pm72
-rw-r--r--debian/NEWS4
-rw-r--r--debian/changelog11
-rw-r--r--doc/plugins/meta.mdwn15
-rw-r--r--doc/plugins/write.mdwn4
6 files changed, 78 insertions, 35 deletions
diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm
index 53b051816..59eabb606 100644
--- a/IkiWiki/Plugin/inline.pm
+++ b/IkiWiki/Plugin/inline.pm
@@ -152,7 +152,12 @@ sub preprocess_inline (@) { #{{{
# that if they are removed or otherwise changed, the inline will be
# sure to be updated.
add_depends($params{page}, join(" or ", @list));
-
+ # Force a scan of this page so any metadata that appears after this
+ # inline directive is available when inlining. The page normally
+ # wouldn't be scanned if it's only being rebuilt because of a
+ # dependency.
+ IkiWiki::scan($pagesources{$params{page}});
+
my $feednum="";
my $feedid=join("\0", map { $_."\0".$params{$_} } sort keys %params);
diff --git a/IkiWiki/Plugin/meta.pm b/IkiWiki/Plugin/meta.pm
index 88c942fa4..586dbcb81 100644
--- a/IkiWiki/Plugin/meta.pm
+++ b/IkiWiki/Plugin/meta.pm
@@ -16,7 +16,7 @@ my %copyright;
sub import { #{{{
hook(type => "needsbuild", id => "meta", call => \&needsbuild);
- hook(type => "preprocess", id => "meta", call => \&preprocess);
+ hook(type => "preprocess", id => "meta", call => \&preprocess, scan => 1);
hook(type => "pagetemplate", id => "meta", call => \&pagetemplate);
} # }}}
@@ -77,13 +77,10 @@ sub preprocess (@) { #{{{
# fully encoded.
$value=decode_entities($value);
+ # Metadata collection that needs to happen during the scan pass.
if ($key eq 'title') {
$title{$page}=HTML::Entities::encode_numeric($value);
}
- elsif ($key eq 'permalink') {
- $permalink{$page}=$value;
- push @{$meta{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />');
- }
elsif ($key eq 'date') {
eval q{use Date::Parse};
if (! $@) {
@@ -91,6 +88,40 @@ sub preprocess (@) { #{{{
$IkiWiki::pagectime{$page}=$time if defined $time;
}
}
+ elsif ($key eq 'license') {
+ push @{$meta{$page}}, '<link rel="license" href="#page_license" />';
+ $license{$page}=$value;
+ return "";
+ }
+ elsif ($key eq 'copyright') {
+ push @{$meta{$page}}, '<link rel="copyright" href="#page_copyright" />';
+ $copyright{$page}=$value;
+ return "";
+ }
+ elsif ($key eq 'link' && ! %params) {
+ # hidden WikiLink
+ push @{$links{$page}}, $value;
+ return "";
+ }
+ elsif ($key eq 'author') {
+ $author{$page}=$value;
+ # fallthrough
+ }
+ elsif ($key eq 'authorurl') {
+ $authorurl{$page}=$value;
+ # fallthrough
+ }
+
+ if (! defined wantarray) {
+ # avoid collecting duplicate data during scan pass
+ return;
+ }
+
+ # Metadata collection that happens only during preprocessing pass.
+ if ($key eq 'permalink') {
+ $permalink{$page}=$value;
+ push @{$meta{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />');
+ }
elsif ($key eq 'stylesheet') {
my $rel=exists $params{rel} ? $params{rel} : "alternate stylesheet";
my $title=exists $params{title} ? $params{title} : $value;
@@ -113,14 +144,6 @@ sub preprocess (@) { #{{{
push @{$meta{$page}}, '<link href="'.encode_entities($value).
'" rel="openid.delegate" />';
}
- elsif ($key eq 'license') {
- push @{$meta{$page}}, '<link rel="license" href="#page_license" />';
- $license{$page}=$value;
- }
- elsif ($key eq 'copyright') {
- push @{$meta{$page}}, '<link rel="copyright" href="#page_copyright" />';
- $copyright{$page}=$value;
- }
elsif ($key eq 'redir') {
return "" if $page ne $destpage;
my $safe=0;
@@ -160,17 +183,17 @@ sub preprocess (@) { #{{{
push @{$meta{$page}}, $redir;
}
elsif ($key eq 'link') {
- return "[[meta ".gettext("link is no longer supported")."]]";
+ if (%params) {
+ $meta{$page}.=scrub("<link href=\"".encode_entities($value)."\" ".
+ join(" ", map {
+ encode_entities($_)."=\"".encode_entities(decode_entities($params{$_}))."\""
+ } keys %params).
+ " />\n");
+ }
}
else {
push @{$meta{$page}}, scrub('<meta name="'.encode_entities($key).
'" content="'.encode_entities($value).'" />');
- if ($key eq 'author') {
- $author{$page}=$value;
- }
- elsif ($key eq 'authorurl') {
- $authorurl{$page}=$value;
- }
}
return "";
@@ -197,15 +220,6 @@ sub pagetemplate (@) { #{{{
if exists $author{$page} && $template->query(name => "author");
$template->param(authorurl => $authorurl{$page})
if exists $authorurl{$page} && $template->query(name => "authorurl");
-
- if ($page ne $destpage &&
- ((exists $license{$page} && ! exists $license{$destpage}) ||
- (exists $copyright{$page} && ! exists $copyright{$destpage}))) {
- # Force a scan of the destpage to get its copyright/license
- # info. If the info is declared after an inline, it will
- # otherwise not be available at this point.
- IkiWiki::scan($pagesources{$destpage});
- }
if (exists $license{$page} && $template->query(name => "license") &&
($page eq $destpage || ! exists $license{$destpage} ||
diff --git a/debian/NEWS b/debian/NEWS
index 203f31e1a..c8228d4bd 100644
--- a/debian/NEWS
+++ b/debian/NEWS
@@ -5,10 +5,6 @@ ikiwiki (2.16) unstable; urgency=low
Redirection pages have been left behind for these moved pages temporarily,
and will be removed later.
- The meta plugin no longer supports setting internal or external links
- with "meta link". Instead, use "meta openid" for openid links, and use tags
- for in-wiki invisible links between pages.
-
If you use the calendar plugin, ikiwiki is now smarter and your nightly
cron job to update the wiki doesn't need to rebuild everything. Just pass
--refresh to ikiwiki in the cron job and it will update only pages that
diff --git a/debian/changelog b/debian/changelog
index d5117d867..db64abe81 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -2,7 +2,18 @@ ikiwiki (2.20) UNRELEASED; urgency=low
* inline: Add copyright/license info on a per-post basis to atom
feeds if available. (rss doesn't allow such info on a per-post basis)
+ * Also include overall copyright/license and author info in atom feeds if
+ available.
* meta: Allow copyright/license metadata to contain arbitrary markup.
+ * Call preprocessor hooks in void context during the scan pass. This allows
+ the hook to determine if it's just scanning, and avoid expensive
+ operations.
+ * img: Detect scan mode and avoid generating and writing the image file
+ during it, for a 2x speedup.
+ * meta: Run in scan mode again (more intelligently) and re-add support for
+ meta link.
+ * Fix support for the case where metadata appears after an inline directive.
+ This was broken in version 2.16.
-- Joey Hess <joeyh@debian.org> Wed, 09 Jan 2008 00:34:46 -0500
diff --git a/doc/plugins/meta.mdwn b/doc/plugins/meta.mdwn
index ec4348e41..0bcd11b67 100644
--- a/doc/plugins/meta.mdwn
+++ b/doc/plugins/meta.mdwn
@@ -69,6 +69,21 @@ Supported fields:
\\[[meta openid="http://joeyh.myopenid.com/"
server="http://www.myopenid.com/server"]]
+* link
+
+ Specifies a link to another page. This can be used as a way to make the
+ wiki treat one page as linking to another without displaying a user-visible
+ [[ikiwiki/WikiLink]]:
+
+ \[[meta link=otherpage]]
+
+ It can also be used to insert a html &lt;link&gt; tag. For example:
+
+ \[[meta link="http://joeyh.myopenid.com/" rel="openid.delegate"]]
+
+ However, this latter syntax won't be allowed if the [[htmlscrubber]] is
+ enabled, since it can be used to insert unsafe content.
+
* redir
Causes the page to redirect to another page in the wiki.
diff --git a/doc/plugins/write.mdwn b/doc/plugins/write.mdwn
index 34caf83f6..0da425402 100644
--- a/doc/plugins/write.mdwn
+++ b/doc/plugins/write.mdwn
@@ -123,7 +123,9 @@ An optional "scan" parameter, if set to a true value, makes the hook be
called during the preliminary scan that ikiwiki makes of updated pages,
before beginning to render pages. This parameter should be set to true if
the hook modifies data in `%links`. Note that doing so will make the hook
-be run twice per page build, so avoid doing it for expensive hooks.
+be run twice per page build, so avoid doing it for expensive hooks. (As an
+optimisation, if your preprocessor hook is called in a void context, you
+can assume it's being run in scan mode.)
Note that if the [[htmlscrubber]] is enabled, html in
[[ikiwiki/PreProcessorDirective]] output is sanitised, which may limit what