author     intrigeri <intrigeri@boum.org>    2008-12-30 18:24:36 +0100
committer  intrigeri <intrigeri@boum.org>    2008-12-30 18:24:36 +0100
commit     21add7ffa87a5e622d18bdbb24c638c15bdb3800 (patch)
tree       6780aa6dbee71f18db64e44b012e789fc8197d4b /doc
parent     3190e5cea75a43d38f58b8a45fbc87d5527d18f0 (diff)
parent     3032909090711c86c5056987043eeff5a1f6aec2 (diff)
Merge commit 'upstream/master' into prv/po
Conflicts:
	debian/control
	debian/copyright
	doc/ikiwiki/pagespec.mdwn
Signed-off-by: intrigeri <intrigeri@boum.org>
Diffstat (limited to 'doc')
161 files changed, 2379 insertions, 496 deletions
diff --git a/doc/bugs/Allow_overriding_of_symlink_restriction.mdwn b/doc/bugs/Allow_overriding_of_symlink_restriction.mdwn index 07badd646..efdd9004e 100644 --- a/doc/bugs/Allow_overriding_of_symlink_restriction.mdwn +++ b/doc/bugs/Allow_overriding_of_symlink_restriction.mdwn @@ -34,9 +34,9 @@ Is there a huge objection to this patch? index 990fcaa..0fb78ba 100644 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm - @@ -260,13 +260,15 @@ sub prune ($) { #{{{ + @@ -260,13 +260,15 @@ sub prune ($) { - sub refresh () { #{{{ + sub refresh () { # security check, avoid following symlinks in the srcdir path - my $test=$config{srcdir}; - while (length $test) { @@ -108,7 +108,7 @@ like this being accepted before I bothered. use IkiWiki; +use File::Spec; - sub gen_wrapper () { #{{{ + sub gen_wrapper () { - $config{srcdir}=abs_path($config{srcdir}); - $config{destdir}=abs_path($config{destdir}); - my $this=abs_path($0); diff --git a/doc/bugs/Can__39__t_create_root_page.mdwn b/doc/bugs/Can__39__t_create_root_page.mdwn index 60cbcd530..91c2eae60 100644 --- a/doc/bugs/Can__39__t_create_root_page.mdwn +++ b/doc/bugs/Can__39__t_create_root_page.mdwn @@ -33,7 +33,7 @@ This type of page name (with leading slash) also gets created by the aggregate p index 99cead6..23d9616 100644 --- a/IkiWiki/CGI.pm +++ b/IkiWiki/CGI.pm - @@ -305,9 +305,11 @@ sub cgi_editpage ($$) { #{{{ + @@ -305,9 +305,11 @@ sub cgi_editpage ($$) { my $page=$form->field('page'); $page=possibly_foolish_untaint($page); if (! defined $page || ! length $page || @@ -46,7 +46,7 @@ This type of page name (with leading slash) also gets created by the aggregate p my $baseurl=$config{url}."/".htmlpage($page); - @@ -425,6 +427,7 @@ sub cgi_editpage ($$) { #{{{ + @@ -425,6 +427,7 @@ sub cgi_editpage ($$) { $from ne $form->field('from') || file_pruned($from, $config{srcdir}) || $from=~/^\// || diff --git a/doc/bugs/Comments_link_is_to_index.html_if_usedirs_is_on.mdwn b/doc/bugs/Comments_link_is_to_index.html_if_usedirs_is_on.mdwn new file mode 100644 index 000000000..6df3ccd9c --- /dev/null +++ b/doc/bugs/Comments_link_is_to_index.html_if_usedirs_is_on.mdwn @@ -0,0 +1,5 @@ +When a page links to its own #comments anchor you get a link like +"index.html#comments" rather than "./#comments". Fixed in commit 0844bd0b +on my 'comments' branch. --[[smcv]] + +[[!tag patch done]] diff --git a/doc/bugs/IkiWiki::Wrapper_should_use_destdir.mdwn b/doc/bugs/IkiWiki::Wrapper_should_use_destdir.mdwn new file mode 100644 index 000000000..6b02c4186 --- /dev/null +++ b/doc/bugs/IkiWiki::Wrapper_should_use_destdir.mdwn @@ -0,0 +1,23 @@ +In IkiWiki/Wrapper.pm, the gen_wrapper function finds out what srcdir and +destdir are set to in the config, but does not use them. + +Later in the sub, when a new wiki.cgi wrapper is being created when calling +ikiwiki --setup /path/to/setup, it will only work if cgi\_wrapper in the +config file is set to the full path. Otherwise, it creates wiki.cgi in the +current working directory. It works with the other wrapper it sets up in +my config - post\_update (using git), as that shows in the config with a +full path. + +One workaround would be to mention in the setup file that cgi_wrapper has +to be the full path, not just the file name, but that seems silly when +destdir is also specified in that file and that's where it should go, and +$config{destdir} is a known value in the Wrapper.pm file. + +> Nowhere in any documentation does +> it say that cgi\_wrapper is relative to the destdir. 
+> As noted in [[discussion]], there are web server setups +> that require the cgi be located elsewhere. +> [[done]] --[[Joey]] + +>> A comment in the generated setup file that all paths should be full +>> would prevent my (admittedly dumb) error without any drawbacks. diff --git a/doc/bugs/IkiWiki::Wrapper_should_use_destdir/discussion.mdwn b/doc/bugs/IkiWiki::Wrapper_should_use_destdir/discussion.mdwn new file mode 100644 index 000000000..870fa7a66 --- /dev/null +++ b/doc/bugs/IkiWiki::Wrapper_should_use_destdir/discussion.mdwn @@ -0,0 +1,4 @@ +Just as a point of information, I do not put my cgi wrapper in the dest +directory. Instead I configure Apache to relate a specific URI to the cgi via +ScriptAlias. I would not like things to be changed so that the cgi was put in +the destdir, so I'd vote instead to comment in the `setup\_file`. -- [[Jon]] diff --git a/doc/bugs/Insecure_dependency_in_eval_while_running_with_-T_switch.mdwn b/doc/bugs/Insecure_dependency_in_eval_while_running_with_-T_switch.mdwn index 28b48e2c6..c3beb8219 100644 --- a/doc/bugs/Insecure_dependency_in_eval_while_running_with_-T_switch.mdwn +++ b/doc/bugs/Insecure_dependency_in_eval_while_running_with_-T_switch.mdwn @@ -53,7 +53,7 @@ I didn't apply your following old patch against `Ikiwiki.pm` file: + } + + return eval $newpagespec; - } #}}} + } package IkiWiki::PageSpec; @@ -83,7 +83,7 @@ to break the code I distribute in my backport ;) + my $ret=eval possibly_foolish_untaint(pagespec_translate($spec)); return IkiWiki::FailReason->new("syntax error") if $@; return $ret; - } #}}} + } >> Thanks a lot, Joey! It works :) >> diff --git a/doc/bugs/Meta_plugin_does_not_respect_htmlscrubber__95__skip_setting.___40__patch__41__.mdwn b/doc/bugs/Meta_plugin_does_not_respect_htmlscrubber__95__skip_setting.___40__patch__41__.mdwn new file mode 100644 index 000000000..0e40da551 --- /dev/null +++ b/doc/bugs/Meta_plugin_does_not_respect_htmlscrubber__95__skip_setting.___40__patch__41__.mdwn @@ -0,0 +1,11 @@ +I have been trying to include some meta info using the link setting something like the below + + meta link="http://www.example.com/" rel="command" name="Example" + +This gets removed by the htmlscrubber as you would expect. + +Setting htmlscrubber_skip to the pagespec should stop this getting scrubbed but it does not. + +Below is a patch to fix that. It seams to work but I am not sure of it is the correct thing to do. 
+ +> [[done]], thanks for the patch --[[Joey]] diff --git a/doc/bugs/Monotone_rcs_support.mdwn b/doc/bugs/Monotone_rcs_support.mdwn index 3d1388312..8687e7983 100644 --- a/doc/bugs/Monotone_rcs_support.mdwn +++ b/doc/bugs/Monotone_rcs_support.mdwn @@ -11,7 +11,7 @@ diff --git a/IkiWiki/Rcs/monotone.pm b/IkiWiki/Rcs/monotone.pm index cde6029..34f8f96 100644 --- a/IkiWiki/Rcs/monotone.pm +++ b/IkiWiki/Rcs/monotone.pm -@@ -186,8 +186,9 @@ sub rcs_update () { #{{{ +@@ -186,8 +186,9 @@ sub rcs_update () { check_config(); if (defined($config{mtnsync}) && $config{mtnsync}) { diff --git a/doc/bugs/No_link_for_blog_items_when_filename_contains_a_colon.mdwn b/doc/bugs/No_link_for_blog_items_when_filename_contains_a_colon.mdwn index 019970899..bb3f92f9c 100644 --- a/doc/bugs/No_link_for_blog_items_when_filename_contains_a_colon.mdwn +++ b/doc/bugs/No_link_for_blog_items_when_filename_contains_a_colon.mdwn @@ -38,19 +38,19 @@ At the moment I see two possible solutions: +++ IkiWiki.pm 2008-07-21 20:41:35.000000000 +0200 @@ -477,13 +477,13 @@ - sub titlepage ($) { #{{{ + sub titlepage ($) { my $title=shift; - $title=~s/([^-[:alnum:]:+\/.])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg; + $title=~s/([^-[:alnum:]+\/.])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg; return $title; - } #}}} + } - sub linkpage ($) { #{{{ + sub linkpage ($) { my $link=shift; - $link=~s/([^-[:alnum:]:+\/._])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg; + $link=~s/([^-[:alnum:]+\/._])/$1 eq ' ' ? '_' : "__".ord($1)."__"/eg; return $link; - } #}}} + } What do you think about that? Does the patch have any side-effects I didn't see? diff --git a/doc/bugs/Problem_with_toc.pm_plug-in.mdwn b/doc/bugs/Problem_with_toc.pm_plug-in.mdwn index 8ae347d42..6be5f89b5 100644 --- a/doc/bugs/Problem_with_toc.pm_plug-in.mdwn +++ b/doc/bugs/Problem_with_toc.pm_plug-in.mdwn @@ -9,7 +9,7 @@ Here is a patch for toc.pm for producing non-empty 'a' elements. --- IkiWiki/Plugin/toc.pm.orig Thu Jun 7 11:53:53 2007 +++ IkiWiki/Plugin/toc.pm Thu Jun 7 13:00:00 2007 - @@ -47,7 +47,7 @@ sub format (@) { #{{{ + @@ -47,7 +47,7 @@ sub format (@) { if ($tagname =~ /^h(\d+)$/i) { my $level=$1; my $anchor="index".++$anchors{$level}."h$level"; @@ -18,7 +18,7 @@ Here is a patch for toc.pm for producing non-empty 'a' elements. # Take the first header level seen as the topmost level, # even if there are higher levels seen later on. - @@ -90,6 +90,16 @@ sub format (@) { #{{{ + @@ -90,6 +90,16 @@ sub format (@) { "</a>\n"; $p->handler(text => undef); }, "dtext"); diff --git a/doc/bugs/Problems_with_graphviz.pm_plug-in.mdwn b/doc/bugs/Problems_with_graphviz.pm_plug-in.mdwn index 9a26e505a..c9f698158 100644 --- a/doc/bugs/Problems_with_graphviz.pm_plug-in.mdwn +++ b/doc/bugs/Problems_with_graphviz.pm_plug-in.mdwn @@ -15,7 +15,7 @@ It also generates image URLs relative to the page being rendered, which means th --- IkiWiki/Plugin/graphviz.pm.orig 2007-07-27 11:35:05.000000000 +0200 +++ IkiWiki/Plugin/graphviz.pm 2007-07-27 11:36:02.000000000 +0200 - @@ -69,7 +69,12 @@ sub render_graph (\%) { #{{{ + @@ -69,7 +69,12 @@ sub render_graph (\%) { } } @@ -26,9 +26,9 @@ It also generates image URLs relative to the page being rendered, which means th + else { + return "<img src=\"".urlto($dest, $params{page})."\" />\n"; + } - } #}}} + } - sub graph (@) { #{{{ + sub graph (@) { >> --[[HenrikBrixAndersen]] @@ -38,7 +38,7 @@ The patch below fixes these two issues. 
--- graphviz.pm.orig Thu Jun 7 15:45:16 2007 +++ graphviz.pm Fri Jun 8 12:03:38 2007 - @@ -41,7 +41,6 @@ sub render_graph (\%) { #{{{ + @@ -41,7 +41,6 @@ sub render_graph (\%) { $pid=open2(*IN, *OUT, "$params{prog} -Tpng"); # open2 doesn't respect "use open ':utf8'" @@ -46,7 +46,7 @@ The patch below fixes these two issues. binmode (OUT, ':utf8'); print OUT $src; - @@ -70,7 +69,12 @@ sub render_graph (\%) { #{{{ + @@ -70,7 +69,12 @@ sub render_graph (\%) { } } @@ -57,6 +57,6 @@ The patch below fixes these two issues. + else { + return "<img src=\"".urlto($dest, $params{page})."\" />\n"; + } - } #}}} + } - sub graph (@) { #{{{ + sub graph (@) { diff --git a/doc/bugs/RecentChanges_broken_with_empty_svnpath.mdwn b/doc/bugs/RecentChanges_broken_with_empty_svnpath.mdwn index 836c39a71..c852df5e9 100644 --- a/doc/bugs/RecentChanges_broken_with_empty_svnpath.mdwn +++ b/doc/bugs/RecentChanges_broken_with_empty_svnpath.mdwn @@ -13,7 +13,7 @@ I can not see why this check is needed in the first place, so here's a patch for diff -upr ikiwiki-1.49.orig/IkiWiki/Rcs/svn.pm ikiwiki-1.49/IkiWiki/Rcs/svn.pm --- ikiwiki-1.49.orig/IkiWiki/Rcs/svn.pm Mon Apr 16 15:15:09 2007 +++ ikiwiki-1.49/IkiWiki/Rcs/svn.pm Mon Apr 16 15:15:47 2007 - @@ -176,7 +176,6 @@ sub rcs_recentchanges ($) { #{{{ + @@ -176,7 +176,6 @@ sub rcs_recentchanges ($) { } foreach (keys %{$logentry->{paths}}) { diff --git a/doc/bugs/Spaces_in_link_text_for_ikiwiki_links.mdwn b/doc/bugs/Spaces_in_link_text_for_ikiwiki_links.mdwn index f6dbacad7..8aea5cd29 100644 --- a/doc/bugs/Spaces_in_link_text_for_ikiwiki_links.mdwn +++ b/doc/bugs/Spaces_in_link_text_for_ikiwiki_links.mdwn @@ -44,7 +44,7 @@ reported in [[index/discussion#index11h1]]. >> >> If there was ever a future, syntax-breaking major release of ikiwiki >> (similar to python3000) I'd like to see this fixed as part of that. ->> --[[JonDowland]] +>> --[[users/Jon]] >>> You can enable `prefix_directives` and get the disambiguated behavior >>> and spaces in wikilinks today. It will become the default in 3.0. diff --git a/doc/bugs/Titles_are_lower-cased_when_creating_a_page.mdwn b/doc/bugs/Titles_are_lower-cased_when_creating_a_page.mdwn index cc53c0aea..f2c60309b 100644 --- a/doc/bugs/Titles_are_lower-cased_when_creating_a_page.mdwn +++ b/doc/bugs/Titles_are_lower-cased_when_creating_a_page.mdwn @@ -6,7 +6,7 @@ If I click on "Czars in Russia", I'd like Ikiwiki to create "Czars\_in\_Russia.m > --- a/IkiWiki.pm > +++ b/IkiWiki.pm -> @@ -584,7 +584,7 @@ sub htmllink ($$$;@) { #{{{ +> @@ -584,7 +584,7 @@ sub htmllink ($$$;@) { > return "<span class=\"createlink\"><a href=\"". 
> cgiurl( > do => "create", diff --git a/doc/bugs/Warns_about_use_of_uninitialized_value_if_prefix__95__directives_is_on_and_a_directive_does_not_contain_a_space.mdwn b/doc/bugs/Warns_about_use_of_uninitialized_value_if_prefix__95__directives_is_on_and_a_directive_does_not_contain_a_space.mdwn index a30f110a4..efb5c70b8 100644 --- a/doc/bugs/Warns_about_use_of_uninitialized_value_if_prefix__95__directives_is_on_and_a_directive_does_not_contain_a_space.mdwn +++ b/doc/bugs/Warns_about_use_of_uninitialized_value_if_prefix__95__directives_is_on_and_a_directive_does_not_contain_a_space.mdwn @@ -6,7 +6,7 @@ In `IkiWiki::preprocess`, the last capturing group in the regex used to parse di index 241a7c0..d2c35a2 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -1167,7 +1167,8 @@ sub preprocess ($$$;$$) { #{{{ + @@ -1167,7 +1167,8 @@ sub preprocess ($$$;$$) { }sx; } @@ -14,6 +14,6 @@ In `IkiWiki::preprocess`, the last capturing group in the regex used to parse di + # $4 can be undef if the directive was \[[!foo]] + $content =~ s{$regex}{$handle->($1, $2, $3, ($4 or ""))}eg; return $content; - } #}}} + } [[cherry-picked|done]] --[[Joey]] diff --git a/doc/bugs/beautify__95__urlpath_will_add_.__47___even_if_it_is_already_present.mdwn b/doc/bugs/beautify__95__urlpath_will_add_.__47___even_if_it_is_already_present.mdwn new file mode 100644 index 000000000..8e96b1f56 --- /dev/null +++ b/doc/bugs/beautify__95__urlpath_will_add_.__47___even_if_it_is_already_present.mdwn @@ -0,0 +1,3 @@ +beautify_urlpath will prepend a useless "./" to the URL "./foo". Fixed in commit 5b1cf21a on my comments branch. --[[smcv]] + +[[!tag patch done]] diff --git a/doc/bugs/bugfix_for:___34__mtn:_operation_canceled:_Broken_pipe__34_____40__patch__41__.mdwn b/doc/bugs/bugfix_for:___34__mtn:_operation_canceled:_Broken_pipe__34_____40__patch__41__.mdwn new file mode 100644 index 000000000..b7f38fd29 --- /dev/null +++ b/doc/bugs/bugfix_for:___34__mtn:_operation_canceled:_Broken_pipe__34_____40__patch__41__.mdwn @@ -0,0 +1,24 @@ +When using monotone as revision control system, a "mtn: operation canceled: Broken pipe" message is printed. Reason is that, in a call to mtn, the pipe is closed before mtn has done all its output. This patch fixes the problem. + + diff -up ikiwiki/IkiWiki/Plugin/monotone.pm.orig ikiwiki/IkiWiki/Plugin/monotone.pm + --- ikiwiki/IkiWiki/Plugin/monotone.pm.orig 2008-11-12 23:45:24.000000000 +0100 + +++ ikiwiki/IkiWiki/Plugin/monotone.pm 2008-12-16 12:41:38.000000000 +0100 + @@ -525,13 +525,12 @@ sub rcs_recentchanges ($) { + my $child = open(MTNLOG, "-|"); + if (! $child) { + exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", + - "--brief") || error("mtn log failed to run"); + + "--brief", "--last=$num") || error("mtn log failed to run"); + } + + - while (($num >= 0) and (my $line = <MTNLOG>)) { + + while (my $line = <MTNLOG>) { + if ($line =~ m/^($sha1_pattern)/) { + push @revs, $1; + - $num -= 1; + } + } + close MTNLOG || debug("mtn log exited $?"); + +> Thanks for the patch, and for testing the monotone backend. +> applied [[done]] --[[Joey]] diff --git a/doc/bugs/comments_produce_broken_links_in_RecentChanges.mdwn b/doc/bugs/comments_produce_broken_links_in_RecentChanges.mdwn new file mode 100644 index 000000000..dae00857b --- /dev/null +++ b/doc/bugs/comments_produce_broken_links_in_RecentChanges.mdwn @@ -0,0 +1,9 @@ +Comments produce links like `sandbox/comment_1` in [[RecentChanges]], which, +when clicked, redirect to a page that does not exist. 
+ +The `recentchanges` branch in my repository contains one possible [[patch]], +which causes the CGI to go to the [[ikiwiki/directive/meta]] `permalink`, if +any, if the page is internal (comments do have a permalink). + +> [[done]].. I I had thought about not showing internal page changes at +> all, but I like your approach better --[[Joey]] diff --git a/doc/bugs/dumpsetup_does_not_save_destdir.mdwn b/doc/bugs/dumpsetup_does_not_save_destdir.mdwn new file mode 100644 index 000000000..768c3fc5e --- /dev/null +++ b/doc/bugs/dumpsetup_does_not_save_destdir.mdwn @@ -0,0 +1,3 @@ +Calling ikiwiki with a bunch of options, including the --dumpsetup somefile.setup option creates somefile.setup for later reuse with the --setup option. The destination dir however is not saved in the setup file, it has destdir => ''. + +> that broke in version 2.64 .. fixed [[done]] --[[Joey]] diff --git a/doc/bugs/git_stderr_output_causes_problems.mdwn b/doc/bugs/git_stderr_output_causes_problems.mdwn index 4146a5869..c25ef6927 100644 --- a/doc/bugs/git_stderr_output_causes_problems.mdwn +++ b/doc/bugs/git_stderr_output_causes_problems.mdwn @@ -6,7 +6,7 @@ Ikiwiki's git handling is sending a bunch of output to stderr. The following pa index 425536f..5734bb2 100644 --- a/IkiWiki/Rcs/git.pm +++ b/IkiWiki/Rcs/git.pm - @@ -24,6 +24,7 @@ sub _safe_git (&@) { #{{{ + @@ -24,6 +24,7 @@ sub _safe_git (&@) { if (!$pid) { # In child. # Git commands want to be in wc. diff --git a/doc/bugs/img_plugin_should_pass_through_class_attribute.mdwn b/doc/bugs/img_plugin_should_pass_through_class_attribute.mdwn index 2e67d6357..f72ecade2 100644 --- a/doc/bugs/img_plugin_should_pass_through_class_attribute.mdwn +++ b/doc/bugs/img_plugin_should_pass_through_class_attribute.mdwn @@ -26,7 +26,7 @@ And here's a patch to implement it. Will this survive markdown munging? It seems index 7226231..3eb1ae7 100644 --- a/Plugin/img.pm +++ b/Plugin/img.pm - @@ -93,9 +93,15 @@ sub preprocess (@) { #{{{ + @@ -93,9 +93,15 @@ sub preprocess (@) { $imgurl="$config{url}/$imglink"; } @@ -42,7 +42,7 @@ And here's a patch to implement it. Will this survive markdown munging? It seems + $result .= '/></a>'; + + return $result; - } #}}} + } 1 -- diff --git a/doc/bugs/inline_sort-by-title_issues.mdwn b/doc/bugs/inline_sort-by-title_issues.mdwn index 884846b32..ff4555067 100644 --- a/doc/bugs/inline_sort-by-title_issues.mdwn +++ b/doc/bugs/inline_sort-by-title_issues.mdwn @@ -23,7 +23,7 @@ And here is a [[patch]] for this. It makes `sort=title` actually sort on the ti index 9c336e7..99f6de3 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm - @@ -185,9 +185,12 @@ sub preprocess_inline (@) { #{{{ + @@ -185,9 +185,12 @@ sub preprocess_inline (@) { } } diff --git a/doc/bugs/inline_sort_order_and_meta_date_value.mdwn b/doc/bugs/inline_sort_order_and_meta_date_value.mdwn new file mode 100644 index 000000000..d4ec8f345 --- /dev/null +++ b/doc/bugs/inline_sort_order_and_meta_date_value.mdwn @@ -0,0 +1,314 @@ +I have a directory containing two files. 
f1 (<http://alcopop.org/~jon/repro2/src/blog/debgtd.html>) has + + meta date="2008-07-02 14:13:17" + +f2 (<http://alcopop.org/~jon/repro2/src/blog/moving.html>) has + + meta date="2008-07-02 21:04:21" + +They have both been modified recently: + + >>> stat(f1) + (33188, 459250L, 65027L, 1, 1000, 1000, 1686L, 1227967177, 1227966706, 1227966706) + >>> stat(f2) + (33188, 458868L, 65027L, 1, 1000, 1000, 938L, 1227967187, 1227966705, 1227966705) + +Note that f1 is fractionally newer than f2 in terms of ctime and mtime, but f2 is much newer in terms of the meta information. + +Another page includes them both via inline: + + inline pages="blog/*" show=5 + +The resulting page is rendered with f1 above f2, seemingly not using the meta directive information: <http://alcopop.org/~jon/repro2/dest/blog/>. The footer in the inline pages does use the correct time e.g. <em>Posted Wed 02 Jul 2008 14:13:17 BST</em>. + +If I instead include them using creation_year in the pagespec, they are ordered correctly. + +<http://alcopop.org/~jon/repro2/> contains the src used to reproduce this, the precise ikiwiki invocation (inside Makefile) and the results (dest). + +-- [[users/Jon]] + + +> On Ikiwiki 2.53.3 (Debian Lenny), my inlines are also sorted using mtime +> by default -- despite what the [[documentation|/ikiwiki/directive/inline]] +> says -- but setting the supposed default sort order manually produces the +> correct results. For example, the following inline sorts my blog +> entires using their meta date or ctime: +> +> inline pages="blog/*" show="10" sort="age" +> +> I'll try to look at the code this weekend and see if age is really the +> default sort order. +> +> -- [David A. Harding](http://dtrt.org), 2008-12-20 + +Here is the code. As you can see, sort="age" is equivilant to leaving +out the sort option. --[[Joey]] + + if (exists $params{sort} && $params{sort} eq 'title') { + @list=sort { pagetitle(basename($a)) cmp pagetitle(basename($b)) } @list; + } + elsif (exists $params{sort} && $params{sort} eq 'mtime') { + @list=sort { $pagemtime{$b} <=> $pagemtime{$a} } @list; + } + elsif (! exists $params{sort} || $params{sort} eq 'age') { + @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list; + } + else { + return sprintf(gettext("unknown sort type %s"), $params{sort}); + } + +> On further testing, I find that the bug is limited to the first time +> creation time should be used and has nothing to do with setting the sort +> parameter. Revised steps to reproduce: --[David A. Harding](http://dtrt.org), 2008-12-20 +> +> 1. Create pages that sort different by mtime and ctime +> +> 2. inline pages="somepages/*" +> +> 3. ikiwiki --setup setup_file +> +> 4. Pages are output incorrectly in mtime order +> +> 5. ikiwiki --setup setup_file +> +> 6. Pages are output correctly in ctime order +> +> 7. Create new page in somepages/, set its ctime to earlier than another +> page in sompages/ +> +> 8. ikiwiki --setup setup_file +> +> 9. All previously sorted pages output correctly in ctime order but new +> page is output incorrectly at the top as if its mtime was its ctime +> +> 10. ikiwiki --setup setup_file +> +> 11. All pages, including new page, are output correctly in ctime order + +You're confusing ctime and creation time. This is perhaps not suprising, as +ikiwiki uses the term 'ctime' to refer to creation time. However, the unix +ctime value is not the same thing. Unix ctime can change if a file changes +owner, or in some cases, permissions. Unix ctime also always changes +when the file is modified. 
Ikiwiki wants a first creation date of the file, +and it accomplishes this by recording the initial ctime of a file the first +time it processes it, and *preserving* that creation time forever, ignoring +later ctime changes. + +I suspect that this, coupled with the fact that ikiwiki sorts newest pages +first, explains everything you describe. If not, please send me a shell script +test case I can run, as instructions like "Create pages that sort different by +mtime and ctime" are not ones that I know how to follow (given that touch sets +*both*). --[[Joey]] + +> Sorry. I conflated Unix ctime and ikiwiki's creation time because I +> didn't think the difference was important to this case. I'm a writer, +> and I should have known better -- I appologize. Revised steps to +> reproduce are below; feel free to delete this whole misunderstanding +> to make the bug report more concise. +> +> 1. Create pages in the srcdir that should sort in one order by +> last-modification time and in a diffent order by original creation +> time. For example: +> +> $ echo -e '\[[!meta date="2007-01-01"]]\nNew.' > test/new.mdwn +> $ echo -e '\[[!meta date="2006-01-01"]]\nOld.' > test/old.mdwn +> +> 2. Create a page that includes the files. For example: +> +> +> $ echo '\[[!inline pages="test/*"]]' > sort-test.mdwn +> +> 3. Run ikiwiki. For example `ikiwiki --setup setup_file` +> +> 4. Pages are output incorrectly in modification time order. For example, +> actual partial HTML of the sort-test/index.html from commands used +> above (whitespace-only lines removed; one whitespace-only line +> added): +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/old/">old</a> +> </span> +> <p>Old.</p> +> <span class="pagedate"> +> Posted Sun 01 Jan 2006 12:00:00 AM EST +> </span> +> </div> +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/new/">new</a> +> </span> +> <p>New.</p> +> <span class="pagedate"> +> Posted Mon 01 Jan 2007 12:00:00 AM EST +> </span> +> </div> +> +> 5. Run ikiwiki again with the same command line. For example: `ikiwiki --setup setup_file` +> +> 6. Pages are output correctly in creation time order. For example, +> actual partial HTML of the sort-test/index.html from commands used +> above (whitespace-only lines removed; one whitespace-only line +> added): +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/new/">new</a> +> </span> +> <p>New.</p> +> <span class="pagedate"> +> Posted Mon 01 Jan 2007 12:00:00 AM EST +> </span> +> </div> +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/old/">old</a> +> </span> +> <p>Old.</p> +> <span class="pagedate"> +> Posted Sun 01 Jan 2006 12:00:00 AM EST +> </span> +> </div> +> +> 7. Create a new page with the current Unix mtime and Unix ctime, but a +> !meta date before the most recent creation date of another page. +> For example: +> +> $ echo -e '\[[!meta date="2005-01-01"]]\nOlder.' > test/older.mdwn +> +> 8. Run ikiwiki again with the same command line. For example: `ikiwiki --setup setup_file` +> +> 9. All previously sorted pages output correctly in order of their +> creation time, but the new page is output incorrectly at the top as +> if its modification time was its creation time. 
For example, +> actual partial HTML of the sort-test/index.html from commands used +> above (whitespace-only lines removed; two whitespace-only +> lines added): +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/older/">older</a> +> </span> +> <p>Older.</p> +> <span class="pagedate"> +> Posted Sat 01 Jan 2005 12:00:00 AM EST +> </span> +> </div> +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/new/">new</a> +> </span> +> <p>New.</p> +> <span class="pagedate"> +> Posted Mon 01 Jan 2007 12:00:00 AM EST +> </span> +> </div> +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/old/">old</a> +> </span> +> <p>Old.</p> +> <span class="pagedate"> +> Posted Sun 01 Jan 2006 12:00:00 AM EST +> </span> +> </div> +> +> 10. Run ikiwiki again with the same command line. For example: `ikiwiki --setup setup_file` +> +> 11. All pages, including new page, are output correctly in creation time +> order. For example, actual partial HTML of the sort-test/index.html +> from commands used above (whitespace-only lines removed; two +> whitespace-only lines added): +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/new/">new</a> +> </span> +> <p>New.</p> +> <span class="pagedate"> +> Posted Mon 01 Jan 2007 12:00:00 AM EST +> </span> +> </div> +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/old/">old</a> +> </span> +> <p>Old.</p> +> <span class="pagedate"> +> Posted Sun 01 Jan 2006 12:00:00 AM EST +> </span> +> </div> +> +> <div class="inlinepage"> +> <span class="header"> +> <a href="./../test/older/">older</a> +> </span> +> <p>Older.</p> +> <span class="pagedate"> +> Posted Sat 01 Jan 2005 12:00:00 AM EST +> </span> +> </div> +> +> File status after all the above actions: +> +> $ stat test/* +> File: `test/new.mdwn' +> Size: 33 Blocks: 8 IO Block: 4096 regular file +> Device: ca20h/51744d Inode: 684160 Links: 1 +> Access: (0644/-rw-r--r--) Uid: ( 1000/ harding) Gid: ( 1000/ harding) +> Access: 2008-12-20 21:48:32.000000000 -0500 +> Modify: 2008-12-20 21:27:03.000000000 -0500 +> Change: 2008-12-20 21:27:03.000000000 -0500 +> File: `test/older.mdwn' +> Size: 35 Blocks: 8 IO Block: 4096 regular file +> Device: ca20h/51744d Inode: 684407 Links: 1 +> Access: (0644/-rw-r--r--) Uid: ( 1000/ harding) Gid: ( 1000/ harding) +> Access: 2008-12-20 21:48:32.000000000 -0500 +> Modify: 2008-12-20 21:42:10.000000000 -0500 +> Change: 2008-12-20 21:42:10.000000000 -0500 +> File: `test/old.mdwn' +> Size: 33 Blocks: 8 IO Block: 4096 regular file +> Device: ca20h/51744d Inode: 684161 Links: 1 +> Access: (0644/-rw-r--r--) Uid: ( 1000/ harding) Gid: ( 1000/ harding) +> Access: 2008-12-20 21:48:32.000000000 -0500 +> Modify: 2008-12-20 21:27:09.000000000 -0500 +> Change: 2008-12-20 21:27:09.000000000 -0500 +> +> My ikiwiki configuration file (being used to port a blog from pyblosxom +> to ikiwiki): +> +> harding@mail:~$ sed 's/#.*//; /^[ ]*$/d' .ikiwiki/gnuisance.setup +> use IkiWiki::Setup::Standard { +> wikiname => "HardingBlog", +> adminemail => 'dave@dtrt.org', +> srcdir => "/srv/backup/git/gnuisance.net", +> destdir => "/srv/test.dtrt.org", +> url => "http://srv.dtrt.org", +> wrappers => [ +> ], +> atom => 1, +> syslog => 0, +> prefix_directives => 1, +> add_plugins => [qw{goodstuff tag}], +> disable_plugins => [qw{passwordauth}], +> tagbase => "tag", +> } +> +> --[David A. Harding](http://dtrt.org/), 2008-12-20 + +Thank you for a textbook excellent reproduction recipe. 
+ +What appears to be going on here is that meta directives are not processed +until the leaf pages are rendered, and thus the ctime setting is not +available at the time that they are inlined, and the newer unix ctime is +used. On the second build, the meta data has already been recorded. + +This can probably be avoided by processing meta date at scan time. + +Verified, fix works. [[done]] +--[[Joey]] diff --git a/doc/bugs/links_misparsed_in_CSV_files.mdwn b/doc/bugs/links_misparsed_in_CSV_files.mdwn new file mode 100644 index 000000000..169c070e7 --- /dev/null +++ b/doc/bugs/links_misparsed_in_CSV_files.mdwn @@ -0,0 +1,17 @@ +If a link inside a CSV file contains two or more underscores (\_), then it will get mis-parsed by the table plugin. + +e.g. \[[single\_track\_lines]] becomes "em>lines". + +Links with only one underscore are OK. + +Update 2008-11-24: The problem only occurs if the CSV data is in an external file. If I load it using data="""...""" then it works fine. + +The problem appears to be the call to htmlize inside genrow. If the data is inline, then wikilinks get expanded before they get here, and are OK. If the data is from an external file, the wikilinks aren't expanded, and htmlize will expand \[[single\_track\_lines]] into \[[single<em>track</em>lines]]. + +Oh, wait, I see the problem. IkiWiki::linkify is only called if the external file doesn't exist. If I remove this check and always call IkiWiki::linkify, then the problem is solved. + +(this is inside /usr/share/perl5/IkiWiki/Plugin/table.pm). + +I am rather confused what this check does, and the fact the comments are very different for CSV and DSV when the code is the same doesn't seem to help. + +-- Brian May diff --git a/doc/bugs/lockedit_plugin_should_alert_user_about_an_invalid_pagespec_in_preferences.mdwn b/doc/bugs/lockedit_plugin_should_alert_user_about_an_invalid_pagespec_in_preferences.mdwn index c835d9f98..b8023ce87 100644 --- a/doc/bugs/lockedit_plugin_should_alert_user_about_an_invalid_pagespec_in_preferences.mdwn +++ b/doc/bugs/lockedit_plugin_should_alert_user_about_an_invalid_pagespec_in_preferences.mdwn @@ -1,4 +1,4 @@ -[[plugins/lockedit]] adds the form fields for a [[pagespec]] to preferences. This pagespec should be supplied "raw"; i.e., without quotes around it. Inexperienced users (such as [[myself|jondowland]]) may provide an invalid pagespec, such as one with quotes on it. This will be merrily accepted by the form, but will cause no locking to take place. +[[plugins/lockedit]] adds the form fields for a [[pagespec]] to preferences. This pagespec should be supplied "raw"; i.e., without quotes around it. Inexperienced users (such as [[myself|users/jon]]) may provide an invalid pagespec, such as one with quotes on it. This will be merrily accepted by the form, but will cause no locking to take place. Perhaps some validation should be performed on the pagespec and the form-submission return include "warning: this pagespec is invalid" or "warning: this pagespec does not match any existing pages" or similar. @@ -15,3 +15,7 @@ Perhaps some validation should be performed on the pagespec and the form-submiss > There are small classes of invalid pagespecs. For example, `(foo or bar` > is invalid due to having unbalanced parens, while `foo or and bar` > has invalid syntax. It's possible to detect these, I guess ... --[[Joey]] + +>> Having moved it to the .setup file makes things more obvious I think. +>> Anyway I consider this [[done]], please de-done this if you disagree. 
+>> --[[Jon]] diff --git a/doc/bugs/login_page_should_note_cookie_requirement.mdwn b/doc/bugs/login_page_should_note_cookie_requirement.mdwn index e2d5a352b..96686053c 100644 --- a/doc/bugs/login_page_should_note_cookie_requirement.mdwn +++ b/doc/bugs/login_page_should_note_cookie_requirement.mdwn @@ -4,6 +4,13 @@ At the moment, you go through the login shuffle and then are told that cookies a > websites that have a login require cookies. Such warnings used to be > common, but few sites bother with them anymore. --[[Joey]] +>> Very few websites break without cookies. Even fewer lose data. +>> Can ikiwiki avoid being below average by default? --[MJR](http://mjr.towers.org.uk) + +>>> Can we avoid engaging in hyperbole? (Hint: Your browser probably has a +>>> back button. Hint 2: A username/password does not count as "lost data". +>>> Hint 3: Now we're arguing, which is pointless.) --[[Joey]] + Even better would be to only display the cookie note as a warning if the login page doesn't receive a session cookie. > I considered doing this before, but it would require running the cgi once @@ -11,7 +18,16 @@ Even better would be to only display the cookie note as a warning if the login p > time to check if it took, which is both complicated and probably would > look bad. +>> Might this be possible client-side with javascript? A quick google suggests it is possible: +>> <http://www.javascriptkit.com/javatutors/cookiedetect.shtml>. MJR, want to try adding +>> that? -- [[Will]] + Best of all would be to use URL-based or hidden-field-based session tokens if cookies are not permitted. > This is not very doable since most of the pages the user browses are > static pages in a static location. + +>> The pages that lose data without cookies (the edit pages, primarily) +>> don't look static. Are they really? --[MJR](http://mjr.towers.org.uk) + +>>> As soon as you post an edit page, you are back to a static website. diff --git a/doc/bugs/markdown_bug:_email_escaping_and_plus_addresses.mdwn b/doc/bugs/markdown_bug:_email_escaping_and_plus_addresses.mdwn index 5c04dce03..6fccc5c86 100644 --- a/doc/bugs/markdown_bug:_email_escaping_and_plus_addresses.mdwn +++ b/doc/bugs/markdown_bug:_email_escaping_and_plus_addresses.mdwn @@ -8,7 +8,7 @@ compare: It seems putting a '+' in there throws it. Maybe it's a markdown bug, or maybe the obfuscation markdown applies to email-links is being caught by the HTML sanitizer. - -- [[JonDowland]] + -- [[users/Jon]] > It's a markdown bug. 
For some reason, markdown doesn't recognize the email with a '+' as an email: > diff --git a/doc/bugs/mercurial_fail_to_add.mdwn b/doc/bugs/mercurial_fail_to_add.mdwn index dab40d684..3bbf4e5fd 100644 --- a/doc/bugs/mercurial_fail_to_add.mdwn +++ b/doc/bugs/mercurial_fail_to_add.mdwn @@ -6,7 +6,7 @@ Here is a patch that's seems to work, although I'm not quite sure what's wrong w --- mercurial.pm 2007-03-24 16:14:35.000000000 +0100 +++ /home/hbernard/mercurial.pm 2007-04-19 19:05:47.000000000 +0200 @@ -95,7 +95,7 @@ - sub rcs_add ($) { # {{{ + sub rcs_add ($) { my ($file) = @_; - my @cmdline = ("hg", "-q", "-R", "$config{srcdir}", "add", "$file"); diff --git a/doc/bugs/methodResponse_in_add__95__plugins.mdwn b/doc/bugs/methodResponse_in_add__95__plugins.mdwn index 8a88f4eda..c82b532db 100644 --- a/doc/bugs/methodResponse_in_add__95__plugins.mdwn +++ b/doc/bugs/methodResponse_in_add__95__plugins.mdwn @@ -26,7 +26,7 @@ index e476521..d43abd4 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -471,7 +471,11 @@ sub loadplugins () { #{{{ + @@ -471,7 +471,11 @@ sub loadplugins () { unshift @INC, possibly_foolish_untaint($config{libdir}); } diff --git a/doc/bugs/multiple_pages_with_same_name.mdwn b/doc/bugs/multiple_pages_with_same_name.mdwn index 5ddfb1f6b..20c38c062 100644 --- a/doc/bugs/multiple_pages_with_same_name.mdwn +++ b/doc/bugs/multiple_pages_with_same_name.mdwn @@ -28,14 +28,14 @@ Suggestions welcome. index 4e4da11..853f905 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -618,7 +618,7 @@ sub pagename ($) { #{{{ + @@ -618,7 +618,7 @@ sub pagename ($) { my $type=pagetype($file); my $page=$file; - $page=~s/\Q.$type\E*$// if defined $type; + $page=~s/\Q.$type\E*$// if defined $type && !$hooks{htmlize}{$type}{leavesuffix}; return $page; - } #}}} + } diff --git a/t/pagename.t b/t/pagename.t index 96e6a87..58811b9 100755 @@ -61,7 +61,7 @@ I wonder if this patch will also be useful: index 752d176..3f1b67b 100644 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm - @@ -279,7 +279,11 @@ sub refresh () { #{{{ + @@ -279,7 +279,11 @@ sub refresh () { else { $f=~s/^\Q$config{srcdir}\E\/?//; push @files, $f; diff --git a/doc/bugs/output_of_successful_rename_should_list_the_full_path_to_affected_pages.mdwn b/doc/bugs/output_of_successful_rename_should_list_the_full_path_to_affected_pages.mdwn new file mode 100644 index 000000000..2d9677e7f --- /dev/null +++ b/doc/bugs/output_of_successful_rename_should_list_the_full_path_to_affected_pages.mdwn @@ -0,0 +1,12 @@ +I've just renamed a page and received the following as a result: + +<p> +<b>Successfully renamed users/jondowland.mdwn to users/jon.mdwn.</b> +</p> +<p> + +The following pages have been automatically modified to update their links to users/jon.mdwn: +<ul> +<li><a href="./../../tips/convert_mediawiki_to_ikiwiki/discussion/">discussion</a></li><li><a href="./../../tips/untrusted_git_push/discussion/">discussion</a></li></ul>... + +In this situation I think the link to pages should be expanded to show the entire path, since there is quite likely to be a lot of things like "discussion". -- [[users/Jon]] diff --git a/doc/bugs/pagespec_parsing_chokes_on_function__40____41__.mdwn b/doc/bugs/pagespec_parsing_chokes_on_function__40____41__.mdwn index a2eba694c..78fed0e5d 100644 --- a/doc/bugs/pagespec_parsing_chokes_on_function__40____41__.mdwn +++ b/doc/bugs/pagespec_parsing_chokes_on_function__40____41__.mdwn @@ -54,7 +54,7 @@ case the user is given to rebuilding the wiki by hand. 
--Ethan + } return IkiWiki::FailReason->new("syntax error") if $@; return $ret; - } #}}} + } </pre> > Thanks, [[done]] --[[Joey]] diff --git a/doc/bugs/prune_causing_taint_mode_failures.mdwn b/doc/bugs/prune_causing_taint_mode_failures.mdwn index 1876d9129..5fc1d8b75 100644 --- a/doc/bugs/prune_causing_taint_mode_failures.mdwn +++ b/doc/bugs/prune_causing_taint_mode_failures.mdwn @@ -11,7 +11,7 @@ I've no idea what's happening (hey, I'm a C programmer), but I've hacked prune() <pre> use Scalar::Util qw(tainted); -sub prune ($) { #{{{ +sub prune ($) { my $file=shift; unlink($file); @@ -25,7 +25,7 @@ sub prune ($) { #{{{ $dir = $1; } } -} #}}} +} </pre> > Old versions of perl are known to have bugs with taint checking. diff --git a/doc/bugs/quieten_mercurial.mdwn b/doc/bugs/quieten_mercurial.mdwn index 26f833e5f..3fd75ea1b 100644 --- a/doc/bugs/quieten_mercurial.mdwn +++ b/doc/bugs/quieten_mercurial.mdwn @@ -6,7 +6,7 @@ messages which are then taken for CGI output, causing errors and general trouble @@ -55,7 +55,7 @@ } - sub rcs_update () { #{{{ + sub rcs_update () { - my @cmdline = ("hg", "-R", "$config{srcdir}", "update"); + my @cmdline = ("hg", "-q", "-R", "$config{srcdir}", "update"); if (system(@cmdline) != 0) { @@ -22,7 +22,7 @@ messages which are then taken for CGI output, causing errors and general trouble if (system(@cmdline) != 0) { warn "'@cmdline' failed: $!"; @@ -92,7 +92,7 @@ - sub rcs_add ($) { # {{{ + sub rcs_add ($) { my ($file) = @_; - my @cmdline = ("hg", "-R", "$config{srcdir}", "add", "$file"); diff --git a/doc/bugs/search_for_locale_data_in_the_installed_location.mdwn b/doc/bugs/search_for_locale_data_in_the_installed_location.mdwn index 0a2b1efea..dace2ca19 100644 --- a/doc/bugs/search_for_locale_data_in_the_installed_location.mdwn +++ b/doc/bugs/search_for_locale_data_in_the_installed_location.mdwn @@ -2,7 +2,7 @@ It seems like gettext only searches for locale information in /usr/share/locale, --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -1057,6 +1057,7 @@ sub gettext { #{{{ + @@ -1057,6 +1057,7 @@ sub gettext { $gettext_obj=undef; return shift; } diff --git a/doc/bugs/stray___60____47__p__62___tags.mdwn b/doc/bugs/stray___60____47__p__62___tags.mdwn new file mode 100644 index 000000000..6e508ffda --- /dev/null +++ b/doc/bugs/stray___60____47__p__62___tags.mdwn @@ -0,0 +1,15 @@ +When using the [[plugins/htmltidy]] plugin (and possibly in other circumstances), ikiwiki sometimes creates more `</p>` tags than `<p>` tags, causing unbalanced markup. I've previously noticed unbalanced tags when a `\[[!map]]` matches no pages. This is part of the reason I developed [[plugins/htmlbalance]]. + +This is particularly noticeable if htmltidy is enabled when building the docwiki: on the 'contrib' plugin pages, the title becomes `foo </p> (third-party plugin)` (with the angle-brackets escaped - it seems the text gets sanitized but is then escaped anyway). + +I believe that this snippet in `IkiWiki.pm` might be the reason for the imbalance: + + if ($oneline) { + # hack to get rid of enclosing junk added by markdown + # and other htmlizers + $content=~s/^<p>//i; + $content=~s/<\/p>$//i; + chomp $content; + } + +The fact that HTML in a `\[[!meta title]]` is added but then escaped might indicate that some other bug is involved. 
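(Aside on the snippet quoted in the stray `</p>` report above: it strips the opening and closing tag with two independent substitutions, so whenever only one of them matches the result has an unpaired tag. A minimal sketch of stripping the pair atomically — illustrative only, not ikiwiki's actual fix (the report points at [[plugins/htmlbalance]] for a general solution), and `strip_enclosing_p` is a hypothetical helper name:)

    use strict;
    use warnings;

    # Sketch: remove the enclosing <p>...</p> that markdown adds around a
    # one-line fragment, but only when both tags are really present, so no
    # stray </p> can be left behind.
    sub strip_enclosing_p {
        my $content = shift;
        if ($content =~ m{\A\s*<p>(.*)</p>\s*\z}is) {
            return $1;
        }
        return $content;    # anything else is left untouched
    }

    print strip_enclosing_p("<p>foo (third-party plugin)</p>\n"), "\n";
    # -> foo (third-party plugin)
    print strip_enclosing_p("<ul><li>foo</li></ul>\n");
    # -> unchanged: no enclosing <p> pair, nothing is stripped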
diff --git a/doc/bugs/tbasewiki__95__brokenlinks.t_broken.mdwn b/doc/bugs/tbasewiki__95__brokenlinks.t_broken.mdwn index ac895896a..db3917d21 100644 --- a/doc/bugs/tbasewiki__95__brokenlinks.t_broken.mdwn +++ b/doc/bugs/tbasewiki__95__brokenlinks.t_broken.mdwn @@ -25,12 +25,12 @@ After some digging I found that HTML::Template is being required after the new s filter => sub { my $text_ref = shift; @@ -857,6 +856,7 @@ - } #}}} + } - sub template ($;@) { #{{{ + sub template ($;@) { + require HTML::Template; HTML::Template->new(template_params(@_)); - } #}}} + } **That** gave me: diff --git a/doc/bugs/textile_plugin_dies_if_input_has_a_non-utf8_character.mdwn b/doc/bugs/textile_plugin_dies_if_input_has_a_non-utf8_character.mdwn index 7ec1edc4e..bdd07210e 100644 --- a/doc/bugs/textile_plugin_dies_if_input_has_a_non-utf8_character.mdwn +++ b/doc/bugs/textile_plugin_dies_if_input_has_a_non-utf8_character.mdwn @@ -9,6 +9,6 @@ The first two complaints happen if textile is not loaded, the third fatal one ha 0x92 is "single quote" in the evil windows default codepage. It would be nice to handle this gracefully and not abort ikiwiki at this point, or alternatively, die fatally but mention which input page caused the error. -Interestingly enough, in my case, the input file has several other bad windows characters (0xFC, u-umlaut) which have not caused ikiwiki to abort. ikiwiki version 2.50. -- [[JonDowland]] +Interestingly enough, in my case, the input file has several other bad windows characters (0xFC, u-umlaut) which have not caused ikiwiki to abort. ikiwiki version 2.50. -- [[users/Jon]] > Fixed in git. [[done]] --[[Joey]] diff --git a/doc/download.mdwn b/doc/download.mdwn index 86ddb46b2..1099045b5 100644 --- a/doc/download.mdwn +++ b/doc/download.mdwn @@ -40,6 +40,8 @@ Gentoo has an [ebuild](http://bugs.gentoo.org/show_bug.cgi?id=144453) in its bug IkiWiki can be installed [from macports](http://www.macports.org/ports.php?by=name&substr=ikiwiki) by running `sudo port install ikiwiki`. +A [PKGBUILD for Arch Linux](http://aur.archlinux.org/packages.php?ID=12284) is in the AUR. + ## revision control Ikiwiki is developed in a [[git_repository|git]]. diff --git a/doc/examples/blog.mdwn b/doc/examples/blog.mdwn index 3e89c4b99..ab73f0204 100644 --- a/doc/examples/blog.mdwn +++ b/doc/examples/blog.mdwn @@ -12,7 +12,13 @@ Some additional configuration you might want to do: example of how to tag a post is: \[[!tag tags/life]] -* Enable the sidebar plugin to get a sidebar listing all the categories - you've tagged posts with. +* Enable the [[sidebar|plugins/sidebar]] plugin to get a sidebar listing all + the categories you've tagged posts with. -* Enable the pagestats plugin to get a tag cloud display on the [[index]]. +* Enable the [[pagestats|plugins/pagestats]] plugin to get a tag cloud + to display on the [[index]]. + +* Enable the [[comments|plugins/comments]] plugin and configure it to + enable comments to posts to the blog: + + comments_pagespec => 'blog/posts/* and !*/Discussion', diff --git a/doc/forum/discussion.mdwn b/doc/forum/discussion.mdwn index 1e55d3f57..93cf4656e 100644 --- a/doc/forum/discussion.mdwn +++ b/doc/forum/discussion.mdwn @@ -1,4 +1,4 @@ -I like the idea of this forum heirarchy -- but I think a map would be clearer than inlining the sub-pages. -- [[JonDowland]] +I like the idea of this forum heirarchy -- but I think a map would be clearer than inlining the sub-pages. -- [[users/Jon]] > The easier way to accomplish this is to set archive=yes in the inline. 
> Switching to archive view can be useful when there are a lot of long diff --git a/doc/git.mdwn b/doc/git.mdwn index e7f47f5a0..e9c2e040f 100644 --- a/doc/git.mdwn +++ b/doc/git.mdwn @@ -16,17 +16,29 @@ Or like this if your firewall only passes http traffic (slow): The gitweb is [here](http://git.ikiwiki.info/?p=ikiwiki). -There is also a mirror [on github](http://github.com/joeyh/ikiwiki/tree/master). - Commits to this git repository are fed into [CIA](http://cia.vc), and can be browsed, subscribed to etc on its [project page](http://cia.vc/stats/project/ikiwiki). They're also fed into [twitter](http://twitter.com/ikiwiki). -## branches +## personal git repositories You are of course free to set up your own ikiwiki git repository with your -own [[patches|patch]]. +own [[patches|patch]]. If you list it here, the `gitremotes` script will +automatically add it to git remotes. Your repo will automatically be pulled +into [[Joey]]'s working tree. This is recommended. :-) + +<!-- Machine-parsed format: * wikilink <git:url> --> + +* github `git://github.com/joeyh/ikiwiki.git` + ([browse](http://github.com/joeyh/ikiwiki/tree/master)) + A mirror of the main repo, automatically updated. +* [[smcv]] `git://git.pseudorandom.co.uk/git/smcv/ikiwiki.git` +* [[intrigeri]] `git://gaffer.ptitcanardnoir.org/ikiwiki.git` +* [[gmcmanus]] `git://github.com/gmcmanus/ikiwiki.git` +* [[jelmer]] `git://git.samba.org/jelmer/ikiwiki.git` + +## branches Some of the branches included in the main repository include: @@ -38,9 +50,9 @@ Some of the branches included in the main repository include: * `wikiwyg` adds [[todo/wikiwyg]] support. It is unmerged pending some changes. * `darcs` is being used to add darcs support. -* `pristine-tar` contains deltas that - [pristine-tar](http://kitenet.net/~joey/code/pristine-tar) - can use to recreate released tarballs of ikiwiki * `debian-stable` is used for updates to the old version included in Debian's stable release, and `debian-testing` is used for updates to Debian's testing release. +* `pristine-tar` contains deltas that + [pristine-tar](http://kitenet.net/~joey/code/pristine-tar) + can use to recreate released tarballs of ikiwiki diff --git a/doc/ikiwiki/directive/cutpaste.mdwn b/doc/ikiwiki/directive/cutpaste.mdwn index 012367bdf..ca580e54f 100644 --- a/doc/ikiwiki/directive/cutpaste.mdwn +++ b/doc/ikiwiki/directive/cutpaste.mdwn @@ -17,11 +17,11 @@ follow the paste directive that uses its text. In fact, this is quite useful to postpone big blocks of text like long annotations and have a more natural flow. For example: - \[[!toggleable id="cut" text="\[[!paste id=cutlongdesc]]"]] - \[[!toggleable id="copy" text="\[[!paste id=copylongdesc]]"]] - \[[!toggleable id="paste" text="\[[!paste id=pastelongdesc]]"]] + \[[!toggleable id="cut" text="[[!paste id=cutlongdesc]]"]] + \[[!toggleable id="copy" text="[[!paste id=copylongdesc]]"]] + \[[!toggleable id="paste" text="[[!paste id=pastelongdesc]]"]] - \[...some time later...] + [...some time later...] 
\[[!cut id=cutlongdesc text=""" blah blah blah @@ -40,7 +40,7 @@ Since you can paste without using double quotes, copy and paste can be used to nest directives that require multiline parameters inside each other: \[[!toggleable id=foo text=""" - \[[!toggleable id=bar text="\[[!paste id=baz]]"]] + [[!toggleable id=bar text="[[!paste id=baz]]"]] """]] \[[!cut id=baz text=""" diff --git a/doc/ikiwiki/directive/inline.mdwn b/doc/ikiwiki/directive/inline.mdwn index 9889cda11..40670e1e7 100644 --- a/doc/ikiwiki/directive/inline.mdwn +++ b/doc/ikiwiki/directive/inline.mdwn @@ -73,6 +73,8 @@ Here are some less often needed parameters: configured to `allowatom`, set to "yes" to enable. * `feeds` - controls generation of all types of feeds. Set to "no" to disable generating any feeds. +* `emptyfeeds` - Set to "no" to disable generation of empty feeds. + Has no effect if `rootpage` or `postform` is set. * `template` - Specifies the template to fill out to display each inlined page. By default the `inlinepage` template is used, while the `archivepage` template is used for archives. Set this parameter to @@ -97,11 +99,16 @@ Here are some less often needed parameters: in the blog. The format string is passed to the strftime(3) function. * `feedpages` - A [[PageSpec]] of inlined pages to include in the rss/atom feeds. The default is the same as the `pages` value above, and only pages - matches by that value are included, but some of those can be excluded by + matched by that value are included, but some of those can be excluded by specifying a tighter [[PageSpec]] here. * `guid` - If a URI is given here (perhaps a UUID prefixed with `urn:uuid:`), the Atom feed will have this as its `<id>`. The default is to use the URL of the page containing the `inline` directive. +* `feedfile` - Can be used to change the name of the file generated for the + feed. This is particularly useful if a page contains multiple feeds. + For example, set "feedfile=feed" to cause it to generate `page/feed.atom` + and/or `page/feed.rss`. This option is not supported if the wiki is + configured not to use `usedirs`. [[!meta robots="noindex, follow"]] diff --git a/doc/ikiwiki/directive/tag.mdwn b/doc/ikiwiki/directive/tag.mdwn index 267aee660..64736f8cd 100644 --- a/doc/ikiwiki/directive/tag.mdwn +++ b/doc/ikiwiki/directive/tag.mdwn @@ -21,6 +21,10 @@ located under a base directory, such as "tags/". This is a useful way to avoid having to write the full path to tags, if you want to keep them grouped together out of the way. +Bear in mind that specifying a tagbase means you will need to incorporate it +into the `link()` [[ikiwiki/PageSpec]] you use: e.g., if your tagbase is +`tag`, you would match pages tagged "foo" with `link(tag/foo)`. + If you want to override the tagbase for a particular tag, you can use something like this: diff --git a/doc/ikiwiki/pagespec.mdwn b/doc/ikiwiki/pagespec.mdwn index 176228e4b..d4dd265cc 100644 --- a/doc/ikiwiki/pagespec.mdwn +++ b/doc/ikiwiki/pagespec.mdwn @@ -47,8 +47,8 @@ Some more elaborate limits can be added to what matches using these functions: wiki admins. * "`ip(address)`" - tests whether a modification is being made from the specified IP address. -* Some additional special-purpose limits may be enabled, for matching - [[attachments|attachment]] and [[translations|po]]. 
+* "`postcomment(glob)`" - matches only when comments are being + posted to a page matching the specified glob For example, to match all pages in a blog that link to the page about music and were written in 2005: diff --git a/doc/ikiwiki/pagespec/attachment.mdwn b/doc/ikiwiki/pagespec/attachment.mdwn index 2d33db748..344a4a734 100644 --- a/doc/ikiwiki/pagespec/attachment.mdwn +++ b/doc/ikiwiki/pagespec/attachment.mdwn @@ -9,7 +9,7 @@ configuration setting. For example, to limit arbitrary files to 50 kilobytes, but allow larger mp3 files to be uploaded by joey into a specific directory, and -check all attachments for virii, something like this could be used: +check all attachments for viruses, something like this could be used: virusfree() and ((user(joey) and podcast/*.mp3 and mimetype(audio/mpeg) and maxsize(15mb)) or (!ispage() and maxsize(50kb))) diff --git a/doc/ikiwikiusers.mdwn b/doc/ikiwikiusers.mdwn index 2492fd211..0c45efa4f 100644 --- a/doc/ikiwikiusers.mdwn +++ b/doc/ikiwikiusers.mdwn @@ -101,13 +101,18 @@ Personal sites and blogs * [Olivier Berger's professional homepage](http://www-public.it-sudparis.eu/~berger_o/) * [Andrey Tarantsov's homepage](http://www.tarantsov.com/) * [Don Marti's blog](http://zgp.org/~dmarti/) -* [[JonDowland]]'s [homepage](http://jmtd.net/) +* [[users/Jon]]'s [homepage](http://jmtd.net/) * [[xma]] is using ikiwiki (<http://maillard.mobi/~xma/>) * [[JanWalzer|jwalzer]]'s [homepage](http://wa.lzer.net/) -- Work in Progress +* [[Adam_Trickett|ajt]]'s home intranet/sanbox system ([Internet site & blog](http://www.iredale.net/) -- not ikiwiki yet) + +Schools +======= +* [St Hugh of Lincoln Primary School in Surrey](http://hugh.vm.bytemark.co.uk/) Please feel free to add your own ikiwiki site! -See also: [Debian ikiwiki popcon graph](http://people.debian.org/~igloo/popcon-graphs/index.php?packages=ikiwiki) +See also: [Debian ikiwiki popcon graph](http://popcon.debian.org/~igloo/popcon-graphs/index.php?packages=ikiwiki) and [google search for ikiwiki powered sites](http://www.google.com/search?q=%22powered%20by%20ikiwiki%22). While nothing makes me happier than knowing that ikiwiki has happy users, dropping some change in the [[TipJar]] is a nice way to show extra appreciation. diff --git a/doc/ikiwikiusers/discussion.mdwn b/doc/ikiwikiusers/discussion.mdwn index 2da3668d4..39a9bb921 100644 --- a/doc/ikiwikiusers/discussion.mdwn +++ b/doc/ikiwikiusers/discussion.mdwn @@ -29,3 +29,7 @@ Thanks, --[[Chao]] Thanks for the reply Joey! ikiwiki is a fantastic wiki complier, though I do not have my own machine momentarily, i will pay close attention to its development. Hopefully I will be one of the ikiwiki users one day :) cheers --[[Chao]] + +---- + +Are there automated hosting sites for ikiwiki yet? If you know one, can you add one in a new section on [[ikiwikiusers]] please? If you don't know any and you're willing to pay to set one up (shouldn't be much more expensive than a single ikiwiki IMO), [contact me](http://www.ttllp.co.uk/contact.html) and let's talk. -- MJR diff --git a/doc/install.mdwn b/doc/install.mdwn index d745737aa..cc3a4c29f 100644 --- a/doc/install.mdwn +++ b/doc/install.mdwn @@ -19,15 +19,6 @@ they are available. Various [[plugins]] use other perl modules and utilities; see their individual documentation for details. 
-### Installing dependencies with yum - -Here's an example of how to install ikiwiki's dependencies using yum -on Fedora 7: - - yum install perl-Text-Markdown perl-Mail-Sendmail perl-HTML-Scrubber \ - perl-XML-Simple perl-TimeDate perl-HTML-Template perl-CGI-FormBuilder \ - perl-CGI-Session perl-File-MimeInfo perl-gettext perl-Authen-Passphrase - ### Installing dependencies by hand If you want to install by hand from the tarball, you should make sure that diff --git a/doc/install/discussion.mdwn b/doc/install/discussion.mdwn index 2b88f6e66..b5757070f 100644 --- a/doc/install/discussion.mdwn +++ b/doc/install/discussion.mdwn @@ -172,9 +172,9 @@ Not sure how to provide proper version information for you.--[[vibrog]] --- -I've tried a couple of times and my cpan has never recognised Bundle::IkiWiki. Is that section of the page still accurate? -- [[JonDowland]] +I've tried a couple of times and my cpan has never recognised Bundle::IkiWiki. Is that section of the page still accurate? -- [[users/Jon]] > Are you running perl with the environemnt settings specified on the page? > Can you show how it fails to find the bundle? --[[Joey]] ->> I was not. Next time I build I will have to try that (I'll need to tweak it as I already override PERL5LIB; also I need to specify http proxies). Thanks for your help! -- [[JonDowland]] +>> I was not. Next time I build I will have to try that (I'll need to tweak it as I already override PERL5LIB; also I need to specify http proxies). Thanks for your help! -- [[users/Jon]] diff --git a/doc/news/openid/discussion.mdwn b/doc/news/openid/discussion.mdwn index 3d5e9dc22..aa9f3f0be 100644 --- a/doc/news/openid/discussion.mdwn +++ b/doc/news/openid/discussion.mdwn @@ -42,7 +42,7 @@ only Apache/iptables rules for this? Maybe it's related to > Error: /srv/web/ikiwiki.info/todo/Configurable_minimum_length_of_log_message_for_web_edits/index.html independently created, not overwriting with version from todo/Configurable_minimum_length_of_log_message_for_web_edits -[[jondowland]] +[[users/jon]] ---- diff --git a/doc/news/version_2.66.mdwn b/doc/news/version_2.66.mdwn deleted file mode 100644 index 029c7a1b9..000000000 --- a/doc/news/version_2.66.mdwn +++ /dev/null @@ -1,39 +0,0 @@ -ikiwiki 2.66 released with [[!toggle text="these changes"]] -[[!toggleable text=""" - * recentchanges: Fix redirects to non-page files. - * aggregate: Avoid uninitialized value warnings for pages with no recorded - ctime. - * attachment: Add admin() pagespec to test if the uploading user is a wiki - admin. - * git: Fix handling of utf-8 filenames in recentchanges. - * tag: Make edit link for new tags ensure that the tags are created - inside tagbase, when it's set. - * template: Make edit link for new templates ensure the page is located - under toplevel templates directory. - * htmlscrubber: Add a config setting that can be used to disable the - scrubber acting on a set of pages. - * Expand usage message and add --help. Closes: #[500344](http://bugs.debian.org/500344) - * Beautify urls used in various places. (smcv) - * Export pagetitle, titlepage, linkpage. - * htmltidy: Avoid returning undef if tidy fails. Also avoid returning the - untidied content if tidy crashes. In either case, it seems best to tidy - the content to nothing. - * htmltidy: Avoid spewing tidy errors to stderr. - * Reorganize index file, add a format version field. Upgrades to the new - index format should be transparent. 
- * Add %wikistate, which is like %pagestate except not specific to a given - page, and is preserved across rebuilds. - * editpage: Be more aggressive (and less buggy) about cleaning up - temporary files rendered during page preview. - * Add an indexpages option, which causes foo/index.mdwn to be the source - for page foo when foo.mdwn doesn't exist. Also, when it's enabled, - creating a new page will save it to foo/index.mdwn by default. - Closes: #[474611](http://bugs.debian.org/474611) - (Sponsored by The TOVA Company.) - * httpauth: Document that ikiwiki.cgi has to be in a directory subject to - authentication. Closes: #[500524](http://bugs.debian.org/500524) - * inline: Fix handling of rootpage that doesn't exist. - * attachment: Support adding attachments to pages even as they are being - created. - * remove, rename: Allow acting on attachments as a page is being created. - * Updated French translation. Closes: #[500929](http://bugs.debian.org/500929)"""]]
\ No newline at end of file diff --git a/doc/news/version_2.67.mdwn b/doc/news/version_2.67.mdwn deleted file mode 100644 index a0911b58f..000000000 --- a/doc/news/version_2.67.mdwn +++ /dev/null @@ -1,17 +0,0 @@ -ikiwiki 2.67 released with [[!toggle text="these changes"]] -[[!toggleable text=""" - * remove: Avoid $\_ breakage. (Stupid, stupid perl.) - * Updated Spanish translation from Victor Moral. - * lockedit: Support specifying which users (and IP addresses) a page - is locked for. This supports most of the ACL type things users have been - wanting to be done. Closes: #[443346](http://bugs.debian.org/443346) (It does not control who can read a - page, but that's out of scope for ikiwiki.) - * orphans: Fix unquoted page name in regexp. - * google: Plugin provides google site search, contributed by Peter Simons. - * Pass HTTPS variable through the wrapper so that CGI->https can be used - by plugins. Closes: #[502047](http://bugs.debian.org/502047) - * inline: Allow MTIME to be used in inlinepage.tmpl. - * inline: Use the feed's description in the rss and atom links. - Closes: #[502113](http://bugs.debian.org/502113) - * aggregate: Avoid bug that caused immediate expiration of items - with a date in the future."""]]
\ No newline at end of file diff --git a/doc/news/version_2.71.mdwn b/doc/news/version_2.71.mdwn new file mode 100644 index 000000000..2c8609988 --- /dev/null +++ b/doc/news/version_2.71.mdwn @@ -0,0 +1,28 @@ +ikiwiki 2.71 released with [[!toggle text="these changes"]] +[[!toggleable text=""" + * comments: Blog-style comment support, contributed by Simon McVittie. + * htmlbalance: New plugin contributed by Simon McVittie. + * Change deb dependencies to list Text::Markdown before markdown (really + this time). + * Improve escaping of wikilinks and preprocessor directives in content + produced by aggregate and recentchanges. + * French translation update from Philippe Batailler. Closes: #[506250](http://bugs.debian.org/506250) + * Spanish translation update from Victor Moral. + * Fix handling of wrappergroup option. + * Correct --dumpsetup to include the srcdir in the setup file. + * German translation update from Kai Wasserbäch. Closes: #[507056](http://bugs.debian.org/507056) + * inline: Support emptyfeeds=no option to skip generating empty feeds. + * inline: Support feedfile option to change the filename of the feed + generated. + * meta: Pass info to htmlscrubber so htmlscrubber\_skip can take effect. + * htmlbalance: don't compact whitespace, and set misc other options (smcv) + * rename: Fix double-escaping of page name in edit box. + * monotone: When getting the log, tell monotone how many entries + we want, rather than closing the pipe, which it dislikes. (thm) + * Coding style change: Remove explcit vim folding markers. + * aggregate: If a feed fails to be downloaded, try again immediatly + next time aggregation is run, even if the usual time has not passed. + Closes: #[508622](http://bugs.debian.org/508622) (Michael Gold) + * meta: Process meta date during scan pass so that the date will always + affect sorting in inlines. + * Improve display of some openids (smcv)"""]]
\ No newline at end of file diff --git a/doc/news/version_2.72.mdwn b/doc/news/version_2.72.mdwn new file mode 100644 index 000000000..26274a3c8 --- /dev/null +++ b/doc/news/version_2.72.mdwn @@ -0,0 +1,9 @@ +ikiwiki 2.72 released with [[!toggle text="these changes"]] +[[!toggleable text=""" + * Avoid comments in recentchanges being broken links (smcv) + * Add deprecation warning for GlobLists, which will stop working in 3.0. + * camelcase: Add camelcase\_ignore setting. + * googlecalendar: Add runtime deprecation warning. + * comments: Deal with users entering unqualified or partial urls. + * inline: Run format hook first, to ensure other format hooks can affect + inlined content. Closes: #[509710](http://bugs.debian.org/509710)"""]]
\ No newline at end of file diff --git a/doc/plugins/aggregate.mdwn b/doc/plugins/aggregate.mdwn index c40a6dc22..6fc87853b 100644 --- a/doc/plugins/aggregate.mdwn +++ b/doc/plugins/aggregate.mdwn @@ -9,9 +9,9 @@ New users of aggregate should enable the `aggregateinternal => 1` option in the .setup file. If you don't do so, you will need to enable the [[html]] plugin as well as aggregate itself, since feed entries will be stored as HTML. -The [[meta]] and [[tag]] plugins are also recommended. The -[[htmltidy]] plugin is suggested, since feeds can easily contain html -problems, some of which tidy can fix. +The [[meta]] and [[tag]] plugins are also recommended. Either the +[[htmltidy]] or [[htmlbalance]] plugin is suggested, since feeds can easily +contain html problems, some of which these plugins can fix. You will need to run ikiwiki periodically from a cron job, passing it the --aggregate parameter, to make it check for new posts. Here's an example diff --git a/doc/plugins/aggregate/discussion.mdwn b/doc/plugins/aggregate/discussion.mdwn index 1db6240d5..1a9844577 100644 --- a/doc/plugins/aggregate/discussion.mdwn +++ b/doc/plugins/aggregate/discussion.mdwn @@ -35,7 +35,7 @@ Two things aren't working as I'd expect: > problem. You can see the feed validator complain about it here: > <http://feedvalidator.org/check.cgi?url=http%3A%2F%2Fwww.davidj.org%2Frss.xml> > -> It's sorta unfortunate that [[cpan XML::Feed]] doesn't just assume the +> It's sorta unfortunate that [[!cpan XML::Feed]] doesn't just assume the > un-esxaped html is part of the description field. Probably other feed > parsers are more lenient. --[[Joey]] diff --git a/doc/plugins/anonok.mdwn b/doc/plugins/anonok.mdwn index 2a8a922cd..ab2f744e2 100644 --- a/doc/plugins/anonok.mdwn +++ b/doc/plugins/anonok.mdwn @@ -5,5 +5,10 @@ By default, anonymous users cannot edit the wiki. This plugin allows anonymous web users, who have not signed in, to edit any page in the wiki by default. -The plugin also has a configuration setting, `anonok_pagespec`. This +The plugin also has a configuration setting, `anonok_pages`. This [[PageSpec]] can be used to allow anonymous editing of matching pages. + +If you're using the [[comments]] plugin, you can allow anonymous comments +to be posted by setting: + + anonok_pages => "postcomment(*)" diff --git a/doc/plugins/calendar/discussion.mdwn b/doc/plugins/calendar/discussion.mdwn index 148b83522..9d57b7a1e 100644 --- a/doc/plugins/calendar/discussion.mdwn +++ b/doc/plugins/calendar/discussion.mdwn @@ -2,3 +2,5 @@ It would be nice if the "month" type calendar could collect all of the matching pages on a given date in some inline type way. --[[DavidBremner]] Is it possible to get the calendar to link to pages based not on their timestamp (as I understand that it does now, or have I misunderstood this?) and instead on for example their location in a directory hierarchy. That way the calendar could be used as a planning / timeline device which I think would be great. --[[Alexander]] + +I would like the ability to specify relative previous months. This way I could have a sidebar with the last three months by specifying no month, then 'month="-1"' and 'month="-2"'. Negative numbers for the month would otherwise be invalid, so this shouldn't produce any conflicts with expected behavior. (Right?) 
-- [[StevenBlack]] diff --git a/doc/plugins/comments.mdwn b/doc/plugins/comments.mdwn new file mode 100644 index 000000000..72b11af64 --- /dev/null +++ b/doc/plugins/comments.mdwn @@ -0,0 +1,43 @@ +[[!template id=plugin name=comments author="[[Simon_McVittie|smcv]]"]] +[[!tag type/useful]] + +This plugin adds "blog-style" comments. Unlike the wiki-style freeform +Discussion pages, these comments are posted by a simple form, cannot later +be edited, and rss/atom feeds are provided of each page's comments. + +When using this plugin, you should also enable [[htmlscrubber]] and either +[[htmltidy]] or [[htmlbalance]]. Directives are filtered out by default, to +avoid commenters slowing down the wiki by causing time-consuming +processing. As long as the recommended plugins are enabled, comment +authorship should hopefully be unforgeable by CGI users. + +The intention is that on a non-wiki site (like a blog) you can lock all +pages for admin-only access, then allow otherwise unprivileged (or perhaps +even anonymous) users to comment on posts. See the documentation of the +[[lockedit]] and [[anonok]] pages for details on locking down a wiki so +users can only post comments. + +Individual comments are stored as internal-use pages named something like +`page/comment_1`, `page/comment_2`, etc. These pages internally use a +`\[[!_comment]]` [[ikiwiki/directive]]. + +There are some global options for the setup file: + +* `comments_pagespec`: [[ikiwiki/PageSpec]] of pages where comments are + allowed. The default is not to allow comments on any pages. To allow + comments to all posts to a blog, you could use + `blog/posts/* and !*/Discussion`. +* `comments_closed_pagespec`: [[ikiwiki/PageSpec]] of pages where + posting of new comments is closed, but any existing comments will still + be displayed. Often you will list a set of individual pages here. + For example: `blog/controversial or blog/flamewar` +* `comments_pagename`: if this is e.g. `comment_` (the default), then + comment pages will be named something like `page/comment_12` +* `comments_allowdirectives`: if true (default false), comments may + contain IkiWiki [[directives|ikiwiki/directive]] +* `comments_commit`: if true (default true), comments will be committed to + the version control system +* `comments_allowauthor`: if true (default false), anonymous commenters may + specify a name for themselves, and the \[[!meta author]] and + \[[!meta authorurl]] directives will not be overridden by the comments + plugin diff --git a/doc/plugins/comments/discussion.mdwn b/doc/plugins/comments/discussion.mdwn new file mode 100644 index 000000000..2a87a3d93 --- /dev/null +++ b/doc/plugins/comments/discussion.mdwn @@ -0,0 +1,163 @@ +## Why internal pages? (unresolved) + +Comments are saved as internal pages, so they can never be edited through the CGI, +only by direct committers. + +> So, why do it this way, instead of using regular wiki pages in a +> namespace, such as `$page/comments/*`? Then you could use [[plugins/lockedit]] to +> limit editing of comments in more powerful ways. --[[Joey]] + +>> Er... I suppose so. I'd assumed that these pages ought to only exist as inlines +>> rather than as individual pages (same reasoning as aggregated posts), though. +>> +>> lockedit is actually somewhat insufficient, since `check_canedit()` +>> doesn't distinguish between creation and editing; I'd have to continue to use +>> some sort of odd hack to allow creation but not editing. 
+>> +>> I also can't think of any circumstance where you'd want a user other than +>> admins (~= git committers) and possibly the commenter (who we can't check for +>> at the moment anyway, I don't think?) to be able to edit comments - I think +>> user expectations for something that looks like ordinary blog comments are +>> likely to include "others can't put words into my mouth". +>> +>> My other objection to using a namespace is that I'm not particularly happy about +>> plugins consuming arbitrary pieces of the wiki namespace - /discussion is bad +>> enough already. Indeed, this very page would accidentally get matched by rules +>> aiming to control comment-posting... :-) --[[smcv]] + +>>> Thinking about it, perhaps one way to address this would be to have the suffix +>>> (e.g. whether commenting on Sandbox creates sandbox/comment1 or sandbox/c1 or +>>> what) be configurable by the wiki admin, in the same way that recentchanges has +>>> recentchangespage => 'recentchanges'? I'd like to see fewer hard-coded page +>>> names in general, really - it seems odd to me that shortcuts and smileys +>>> hard-code the name of the page to look at. Perhaps I could add +>>> discussionpage => 'discussion' too? --[[smcv]] + +>>> (I've now implemented this in my branch. --[[smcv]]) + +>> The best reason to keep the pages internal seems to me to be that you +>> don't want the overhead of every comment spawning its own wiki page. --[[Joey]] + +## Formats (resolved) + +The plugin now allows multiple comment formats while still using internal +pages; each comment is saved as a page containing one `\[[!comment]]` directive, +which has a superset of the functionality of [[ikiwiki/directives/format]]. + +## Access control (unresolved?) + +By the way, I think that who can post comments should be controllable by +the existing plugins opendiscussion, anonok, signinedit, and lockedit. Allowing +posting comments w/o any login, while a nice capability, can lead to +spam problems. So, use `check_canedit` as at least a first-level check? +--[[Joey]] + +> This plugin already uses `check_canedit`, but that function doesn't have a concept +> of different actions. The hack I use is that when a user comments on, say, sandbox, +> I call `check_canedit` for the pseudo-page "sandbox[postcomment]". The +> special `postcomment(glob)` [[ikiwiki/pagespec]] returns true if the page ends with +> "[postcomment]" and the part before (e.g. sandbox) matches the glob. So, you can +> have postcomment(blog/*) or something. (Perhaps instead of taking a glob, postcomment +> should take a pagespec, so you can have postcomment(link(tags/commentable))?) +> +> This is why `anonok_pages => 'postcomment(*)'` and `locked_pages => '!postcomment(*)'` +> are necessary to allow anonymous and logged-in editing (respectively). +> +>> I changed that to move the flag out of the page name, and into a variable that the `match_postcomment` +>> function checks for. Other ugliness still applies. :-) --[[Joey]] +> +> This is ugly - one alternative would be to add `check_permission()` that takes a +> page and a verb (create, edit, rename, remove and maybe comment are the ones I +> can think of so far), use that, and port the plugins you mentioned to use that +> API too. This plugin could either call `check_can("$page/comment1", 'create')` or +> call `check_can($page, 'comment')`. 
+> +> One odd effect of the code structure I've used is that we check for the ability to +> create the page before we actually know what page name we're going to use - when +> posting the comment I just increment a number until I reach an unused one - so +> either the code needs restructuring, or the permission check for 'create' would +> always be for 'comment1' and never 'comment123'. --[[smcv]] + +>> Now resolved, in fact --[[smcv]] + +> Another possibility is to just check for permission to edit (e.g.) `sandbox/comment1`. +> However, this makes the "comments can only be created, not edited" feature completely +> reliant on the fact that internal pages can't be edited. Perhaps there should be a +> `editable_pages` pagespec, defaulting to `'*'`? --[[smcv]] + +## comments directive vs global setting (resolved?) + +When comments have been enabled generally, you still need to mark which pages +can have comments, by including the `\[[!comments]]` directive in them. By default, +this directive expands to a "post a comment" link plus an `\[[!inline]]` with +the comments. [This requirement has now been removed --[[smcv]]] + +> I don't like this, because it's hard to explain to someone why they have +> to insert this into every post to their blog. Seems that the model used +> for discussion pages could work -- if comments are enabled, automatically +> add the comment posting form and comments to the end of each page. +> --[[Joey]] + +>> I don't think I'd want comments on *every* page (particularly, not the +>> front page). Perhaps a pagespec in the setup file, where the default is "*"? +>> Then control freaks like me could use "link(tags/comments)" and tag pages +>> as allowing comments. +>> +>>> Yes, I think a pagespec is the way to go. --[[Joey]] + +>>>> Implemented --[[smcv]] + +>> +>> The model used for discussion pages does require patching the existing +>> page template, which I was trying to avoid - I'm not convinced that having +>> every possible feature hard-coded there really scales (and obviously it's +>> rather annoying while this plugin is on a branch). --[[smcv]] + +>>> Using the template would allow customising the html around the comments +>>> which seems like a good thing? --[[Joey]] + +>>>> The \[[!comments]] directive is already template-friendly - it expands to +>>>> the contents of the template `comments_embed.tmpl`, possibly with the +>>>> result of an \[[!inline]] appended. I should change `comments_embed.tmpl` +>>>> so it uses a template variable `INLINE` for the inline result rather than +>>>> having the perl code concatenate it, which would allow a bit more +>>>> customization (whether the "post" link was before or after the inline). +>>>> Even if you want comments in page.tmpl, keeping the separate comments_embed.tmpl +>>>> and having a `COMMENTS` variable in page.tmpl might be the way forward, +>>>> since the smaller each templates is, the easier it will be for users +>>>> to maintain a patched set of templates. (I think so, anyway, based on what happens +>>>> with dpkg prompts in Debian packages with monolithic vs split +>>>> conffiles.) --[[smcv]] + +>>>>> I've switched my branch to use page.tmpl instead; see what you think? --[[smcv]] + +## Raw HTML (resolved?) + +Raw HTML was not initially allowed by default (this was configurable). + +> I'm not sure that raw html should be a problem, as long as the +> htmlsanitizer and htmlbalanced plugins are enabled. I can see filtering +> out directives, as a special case. 
--[[Joey]] + +>> Right, if I sanitize each post individually, with htmlscrubber and either htmltidy +>> or htmlbalance turned on, then there should be no way the user can forge a comment; +>> I was initially wary of allowing meta directives, but I think those are OK, as long +>> as the comment template puts the \[[!meta author]] at the *end*. Disallowing +>> directives is more a way to avoid commenters causing expensive processing than +>> anything else, at this point. +>> +>> I've rebased the plugin on master, made it sanitize individual posts' content +>> and removed the option to disallow raw HTML. Sanitizing individual posts before +>> they've been htmlized required me to preserve whitespace in the htmlbalance +>> plugin, so I did that. Alternatively, we could htmlize immediately and always +>> save out raw HTML? --[[smcv]] + +>>> There might be some use cases for other directives, such as img, in +>>> comments. +>>> +>>> I don't know if meta is "safe" (ie, guaranteed to be inexpensive and not +>>> allow users to do annoying things) or if it will continue to be in the +>>> future. Hard to predict really, all that can be said with certainty is +>>> all directives will contine to be inexpensive and safe enough that it's +>>> sensible to allow users to (ab)use them on open wikis. +>>> --[[Joey]] diff --git a/doc/plugins/contrib/headinganchors.mdwn b/doc/plugins/contrib/headinganchors.mdwn index ef2fa122a..c80cc0b49 100644 --- a/doc/plugins/contrib/headinganchors.mdwn +++ b/doc/plugins/contrib/headinganchors.mdwn @@ -12,9 +12,9 @@ rst and any other format that produces html. The code is available here: use strict; use IkiWiki 2.00; - sub import { #{{{ + sub import { hook(type => "sanitize", id => "headinganchors", call => \&headinganchors); - } # }}} + } sub text_to_anchor { my $str = shift; @@ -26,11 +26,11 @@ rst and any other format that produces html. The code is available here: return $str; } - sub headinganchors (@) { #{{{ + sub headinganchors (@) { my %params=@_; my $content=$params{content}; $content=~s{<h([0-9])>([^>]*)</h([0-9])>}{'<h'.$1.' id="'.text_to_anchor($2).'">'.$2.'</h'.$3.'>'}gie; return $content; - } # }}} + } 1 diff --git a/doc/plugins/contrib/po.mdwn b/doc/plugins/contrib/po.mdwn index 0fd06cb81..f90ffeed2 100644 --- a/doc/plugins/contrib/po.mdwn +++ b/doc/plugins/contrib/po.mdwn @@ -160,3 +160,6 @@ Any thoughts on this? >>>>> I am able to do myself in this area. --[[intrigeri]] >>>>>> >>>>>> I came up with a patch for the WrapI18N issue --[[Joey]] + +I've set this plugin development aside for a while. I will be back and +finish it at some point in the first quarter of 2009. --[[intrigeri]] diff --git a/doc/plugins/contrib/siterel2pagerel.mdwn b/doc/plugins/contrib/siterel2pagerel.mdwn index 956b6728f..9b09657bf 100644 --- a/doc/plugins/contrib/siterel2pagerel.mdwn +++ b/doc/plugins/contrib/siterel2pagerel.mdwn @@ -13,11 +13,11 @@ other format that produces html. The code is available here: use strict; use IkiWiki 2.00; - sub import { #{{{ + sub import { hook(type => "sanitize", id => "siterel2pagerel", call => \&siterel2pagerel); - } # }}} + } - sub siterel2pagerel (@) { #{{{ + sub siterel2pagerel (@) { my %params=@_; my $baseurl=IkiWiki::baseurl($params{page}); my $content=$params{content}; @@ -25,6 +25,6 @@ other format that produces html. 
The code is available here: $content=~s/(<img(?:\s+(?:class|id|width|height)\s*="?\w+"?)*)\s+src=\s*"\/([^"]*)"/$1 src="$baseurl$2"/mig; # FIXME: do <script and everything else that can have URLs in it return $content; - } # }}} + } 1 diff --git a/doc/plugins/contrib/unixauth.mdwn b/doc/plugins/contrib/unixauth.mdwn index d91ed45f1..76a847744 100644 --- a/doc/plugins/contrib/unixauth.mdwn +++ b/doc/plugins/contrib/unixauth.mdwn @@ -22,7 +22,7 @@ __Security__: [As with passwordauth](/security/#index14h2), be wary of sending u --- Wrapper.pm.orig 2008-07-29 00:09:10.000000000 -0400 +++ Wrapper.pm - @@ -28,7 +28,7 @@ sub gen_wrapper () { #{{{ + @@ -28,7 +28,7 @@ sub gen_wrapper () { my @envsave; push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE @@ -46,16 +46,16 @@ __Security__: [As with passwordauth](/security/#index14h2), be wary of sending u use strict; use IkiWiki 2.00; - sub import { #{{{ + sub import { hook(type => "getsetup", id => "unixauth", call => \&getsetup); hook(type => "formbuilder_setup", id => "unixauth", call => \&formbuilder_setup); hook(type => "formbuilder", id => "unixauth", call => \&formbuilder); hook(type => "sessioncgi", id => "unixauth", call => \&sessioncgi); - } # }}} + } - sub getsetup () { #{{{ + sub getsetup () { return unixauth_type => { type => "string", @@ -83,10 +83,10 @@ __Security__: [As with passwordauth](/security/#index14h2), be wary of sending u safe => 0, rebuild => 1, }, - } #}}} + } # Checks if a string matches a user's password, and returns true or false. - sub checkpassword ($$;$) { #{{{ + sub checkpassword ($$;$) { my $user=shift; my $password=shift; my $field=shift || "password"; @@ -131,9 +131,9 @@ __Security__: [As with passwordauth](/security/#index14h2), be wary of sending u } return $ret; - } #}}} + } - sub formbuilder_setup (@) { #{{{ + sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; @@ -204,7 +204,7 @@ __Security__: [As with passwordauth](/security/#index14h2), be wary of sending u } } - sub formbuilder (@) { #{{{ + sub formbuilder (@) { my %params=@_; my $form=$params{form}; @@ -225,12 +225,12 @@ __Security__: [As with passwordauth](/security/#index14h2), be wary of sending u my $user_name=$form->field('name'); } } - } #}}} + } - sub sessioncgi ($$) { #{{{ + sub sessioncgi ($$) { my $q=shift; my $session=shift; - } #}}} + } 1 diff --git a/doc/plugins/contrib/unixauth/discussion.mdwn b/doc/plugins/contrib/unixauth/discussion.mdwn index dfa4fe6cc..232649863 100644 --- a/doc/plugins/contrib/unixauth/discussion.mdwn +++ b/doc/plugins/contrib/unixauth/discussion.mdwn @@ -32,3 +32,7 @@ I've added support for [checkpassword](http://cr.yp.to/checkpwd/interface.html), > to disentangle the two locks. --[[Joey]] >> Ah, ok, I misunderstood your comment. I'll see what I can figure out. --[[schmonz]] + +>>> My time's been limited for this, but I just saw [[todo/avoid_thrashing]]. How does that interact with pwauth or checkpassword? --[[schmonz]] + +>>>> The DOS still happens, it just uses less memory. 
--[[Joey]] diff --git a/doc/plugins/cutpaste.mdwn b/doc/plugins/cutpaste.mdwn index 1b78e60fc..f74f8a269 100644 --- a/doc/plugins/cutpaste.mdwn +++ b/doc/plugins/cutpaste.mdwn @@ -1,4 +1,4 @@ -[[!template id=plugin name=toggle author="[[Enrico]]"]] +[[!template id=plugin name=cutpaste author="[[Enrico]]"]] [[!tag type/chrome]] This plugin provides the [[ikiwiki/directive/cut]], diff --git a/doc/plugins/htmlbalance.mdwn b/doc/plugins/htmlbalance.mdwn new file mode 100644 index 000000000..f4e2298ee --- /dev/null +++ b/doc/plugins/htmlbalance.mdwn @@ -0,0 +1,9 @@ +[[!template id=plugin name=htmlbalance author="[[Simon_McVittie|smcv]]"]] +[[!tag type/html]] + +This plugin ensures that the HTML emitted by ikiwiki contains well-balanced +HTML tags, by parsing it with [[!cpan HTML::TreeBuilder]] and re-serializing it. This +acts as a lighter-weight alternative to [[plugins/htmltidy]]; it doesn't +ensure validity, but it does at least ensure that formatting from a +blog post pulled in by the [[ikiwiki/directive/inline]] directive doesn't +leak into the rest of the page. diff --git a/doc/plugins/htmlbalance/discussion.mdwn b/doc/plugins/htmlbalance/discussion.mdwn new file mode 100644 index 000000000..c66528a4f --- /dev/null +++ b/doc/plugins/htmlbalance/discussion.mdwn @@ -0,0 +1,10 @@ +Would it be possible to use [[!cpan HTML::Entities]] rather than +`XML::Atom::Util` for encoding entities? The former is already an ikiwiki +dependency (via [[!cpan HTML::Parser]]). + +> Now switched to HTML::Entities --[[Joey]] + +I also wonder if there's any benefit to using this plugin aside from with +aggregate. Perhaps a small one but aggregate seems like the main case.. +wondering if it would be better to just have aggregate balanace the html +automatically and do away with the separate plugin. --[[Joey]] diff --git a/doc/plugins/htmlscrubber.mdwn b/doc/plugins/htmlscrubber.mdwn index 7db372e1b..b9f7e6d22 100644 --- a/doc/plugins/htmlscrubber.mdwn +++ b/doc/plugins/htmlscrubber.mdwn @@ -32,7 +32,7 @@ other HTML-related functionality, such as whether [[meta]] allows potentially unsafe HTML tags. The `htmlscrubber_skip` configuration setting can be used to skip scrubbing -of some pages. Set it to a [[PageSpec]], such as "!*/Discussion", and pages +of some pages. Set it to a [[ikiwiki/PageSpec]], such as "!*/Discussion", and pages matching that can have all the evil CSS, JavsScript, and unsafe html elements you like. One safe way to use this is to use [[lockedit]] to lock those pages, so only admins can edit them. diff --git a/doc/plugins/htmltidy.mdwn b/doc/plugins/htmltidy.mdwn index f675a01ae..580e56f59 100644 --- a/doc/plugins/htmltidy.mdwn +++ b/doc/plugins/htmltidy.mdwn @@ -7,4 +7,5 @@ emitted by ikiwiki. Besides being nicely formatted, this helps ensure that even if users enter suboptimal html, your wiki generates valid html. Note that since tidy is an external program, that is run each time a page -is built, this plugin will slow ikiwiki down somewhat. +is built, this plugin will slow ikiwiki down somewhat. [[plugins/htmlbalance]] +might provide a faster alternative. diff --git a/doc/plugins/lockedit.mdwn b/doc/plugins/lockedit.mdwn index 71bf232ab..d2e98e07a 100644 --- a/doc/plugins/lockedit.mdwn +++ b/doc/plugins/lockedit.mdwn @@ -17,6 +17,10 @@ One handy thing to do if you're using ikiwiki for your blog is to lock posts in your blog, while still letting them comment via the Discussion pages. 
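As a hedged sketch of that arrangement (assuming the posts live under `blog/`; adapt the [[ikiwiki/PageSpec]] to your own layout), the corresponding setup-file line might look like:

    locked_pages => "blog/* and !*/Discussion",

This locks the posts themselves while leaving each post's Discussion subpage open for comments.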
+Alternatively, if you're using the [[comments]] plugin, you can lock +"!postcomment(*)" to allow users to comment on pages, but not edit anything +else. + Wiki administrators can always edit locked pages. The [[ikiwiki/PageSpec]] can specify that some pages are not locked for some users. For example, "important_page and !user(joey)" locks `important_page` while still diff --git a/doc/plugins/pagecount.mdwn b/doc/plugins/pagecount.mdwn index 6235963d3..a56027e60 100644 --- a/doc/plugins/pagecount.mdwn +++ b/doc/plugins/pagecount.mdwn @@ -6,5 +6,5 @@ This plugin provides the [[ikiwiki/directive/pagecount]] currently in the wiki. If it is turned on it can tell us that this wiki includes -[[!pagecount pages="* and !recentchanges"]] -pages, of which [[!pagecount pages="*/Discussion"]] are discussion pages. +[[!pagecount ]] pages, of which +[[!pagecount pages="*/Discussion"]] are discussion pages. diff --git a/doc/plugins/write.mdwn b/doc/plugins/write.mdwn index abcabbdc3..9b5cf27f7 100644 --- a/doc/plugins/write.mdwn +++ b/doc/plugins/write.mdwn @@ -55,8 +55,8 @@ plugin, and a "call" parameter, which tells what function to call for the hook. An optional "last" parameter, if set to a true value, makes the hook run -after all other hooks of its type. Useful if the hook depends on some other -hook being run first. +after all other hooks of its type, and an optional "first" parameter makes +it run first. Useful if the hook depends on some other hook being run first. ## Types of hooks @@ -696,11 +696,15 @@ This can be called when creating a new page, to determine what filename to save the page to. It's passed a page name, and its type, and returns the name of the file to create, relative to the srcdir. -#### `targetpage($$)` +#### `targetpage($$;$)` Passed a page and an extension, returns the filename that page will be rendered to. +Optionally, a third parameter can be passed, to specify the preferred +filename of the page. For example, `targetpage("foo", "rss", "feed")` +will yield something like `foo/feed.rss`. + ## Miscellaneous ### Internal use pages diff --git a/doc/plugins/write/discussion.mdwn b/doc/plugins/write/discussion.mdwn index 9a36d7b0b..24a556ffe 100644 --- a/doc/plugins/write/discussion.mdwn +++ b/doc/plugins/write/discussion.mdwn @@ -43,4 +43,4 @@ distributed wiki. --- -I would find this page clearer split up into sub-pages. Does anyone agree/disagree? -- [[JonDowland]] +I would find this page clearer split up into sub-pages. Does anyone agree/disagree? -- [[users/Jon]] diff --git a/doc/rcs/git.mdwn b/doc/rcs/git.mdwn index 6ba0da894..deddfbd6d 100644 --- a/doc/rcs/git.mdwn +++ b/doc/rcs/git.mdwn @@ -124,8 +124,8 @@ ignores the git authorship information, and uses the username of the unix user who made the commit. Then tests including the `locked_pages` [[PageSpec]] are checked to see if that user can edit the pages in the commit. -You can even set up an anonymous user, to allow anyone to push -changes in via git rather than using the web interface. +You can even set up an [[anonymous_user|tips/untrusted_git_push]], to allow +anyone to push changes in via git rather than using the web interface. ## Optionally using a local wiki to preview changes diff --git a/doc/roadmap.mdwn b/doc/roadmap.mdwn index 488e2dec8..9ed5742eb 100644 --- a/doc/roadmap.mdwn +++ b/doc/roadmap.mdwn @@ -52,7 +52,7 @@ Version 3.0 will be an opportunity to make significant transitions. It will include a vast number of new features, bugfixes, and other improvements, far too many to list here. 
-Release is planned for fall, 2008. +Release is planned for fall^Wlate, 2008. ---- diff --git a/doc/sandbox.mdwn b/doc/sandbox.mdwn index 986b59eac..582d46e84 100644 --- a/doc/sandbox.mdwn +++ b/doc/sandbox.mdwn @@ -4,7 +4,7 @@ testing 1..2..3!! ---- -Here's a paragraph. สวัสดี +Here's a paragraph. The following code block is pre-formatted: @@ -78,8 +78,6 @@ The haiku will change after every save, mind you. * [[foo]] * WikiLink -Test - ----- This SandBox is also a [[blog]]! diff --git a/doc/sandbox/Omgwtf_a_blof_post__33____33____33____33____33__1__33__1__33__11111__33____33____33__1__33__1__33____33__1five.html b/doc/sandbox/Omgwtf_a_blof_post__33____33____33____33____33__1__33__1__33__11111__33____33____33__1__33__1__33____33__1five.html new file mode 100644 index 000000000..fc1757d6e --- /dev/null +++ b/doc/sandbox/Omgwtf_a_blof_post__33____33____33____33____33__1__33__1__33__11111__33____33____33__1__33__1__33____33__1five.html @@ -0,0 +1,31 @@ +<math xmlns="http://www.w3.org/1998/Math/MathML"> + <mrow> + <msup> + <mfenced open="(" close=")"> + <mrow> + <mi>a</mi> + <mo>+</mo> + + <mi>b</mi> + </mrow> + </mfenced> + <mn>2</mn> + </msup> + <mo>-</mo> + <msub> + + <mfenced open="{" close="}"> + <mrow> + <mi>x</mi> + <mo>+</mo> + <mi>y</mi> + </mrow> + </mfenced> + + <mi>i</mi> + </msub> + </mrow> +</math> +<br> +test <b>test</b><abbr title="test">T.</abbr> <h1>test</h1> +<a href="https://bugzilla.mozilla.org">øđ</a> diff --git a/doc/sandbox/castle/discussion/jon_tests_too.mdwn b/doc/sandbox/castle/discussion/jon_tests_too.mdwn index 864f38c0d..bc051b008 100644 --- a/doc/sandbox/castle/discussion/jon_tests_too.mdwn +++ b/doc/sandbox/castle/discussion/jon_tests_too.mdwn @@ -1,3 +1,3 @@ -I recall testing this too, but I'm not sure where the test went. Let's try again. -- [[JonDowland]] +I recall testing this too, but I'm not sure where the test went. Let's try again. -- [[users/Jon]] Context: [[todo/discussion_page_as_blog/discussion/castle]] diff --git a/doc/setup/discussion.mdwn b/doc/setup/discussion.mdwn index 7d8c525e7..3ec123eb5 100644 --- a/doc/setup/discussion.mdwn +++ b/doc/setup/discussion.mdwn @@ -138,3 +138,14 @@ Thanks for your response. You're right. Ubuntu does have ikiwiki, except that it Anyway, I think I might be able to install it from the tarball I downloaded. I've been reading the discussions, had a look at your screencasts, etc. I will give it another bash. -- [[WillDioneda]] ---- + +How do I set up cgi editing? In setup I have: + + * cgiurl => 'http://wiki.had.co.nz/edit.cgi' + * cgi_wrapper => 'edit.cgi' + +But I don't get an edit link on my pages? What am I doing wrong? + +> Assuming you don't have the editpage plugin disabled, all you should need +> to so is re-run `ikiwiki -setup` with the above config and it should +> rebuild your wiki and add the edit links to pages. 
--[[Joey]] diff --git a/doc/style.css b/doc/style.css index 5787ef65e..81a260afd 100644 --- a/doc/style.css +++ b/doc/style.css @@ -12,7 +12,7 @@ display: block; } -.author { +.inlineheader .author { margin: 0; font-size: 18px; font-weight: bold; @@ -372,3 +372,18 @@ legend { span.color { padding: 2px; } + +.comment-header { + font-style: italic; + margin-top: .3em; +} +.comment .author { + font-weight: bold; +} +.comment-subject { + font-weight: bold; +} +.comment { + border: 1px solid #aaa; + padding: 3px; +} diff --git a/doc/tips/DreamHost.mdwn b/doc/tips/DreamHost.mdwn index 6670f8090..070638e3e 100644 --- a/doc/tips/DreamHost.mdwn +++ b/doc/tips/DreamHost.mdwn @@ -150,6 +150,8 @@ Next, add your installed Perl module directory to the *libdir* parameter. It sh libdir => "/home/.server/user/site/perl/lib/perl5/", # CGI Wrapper +The CGI wrapper file will be created automatically by "ikiwiki --setup path/to/setup", as long as you have inserted a valid filename to be created into the setup file. On DreamHost, be careful not to put the ikiwiki.cgi file in a directory that has different owner/group than the file itself (such as the main site.domain.tld/ directory): this will cause suexec to fail. + The wrapper mode of "06755" doesn't seem to work. "755" appears to. However, this may be completely insecure and/or buggy, so if you know better than I, edit this doc and add it here. # Pre-created SVN repository diff --git a/doc/tips/comments_feed.mdwn b/doc/tips/comments_feed.mdwn new file mode 100644 index 000000000..6f8137256 --- /dev/null +++ b/doc/tips/comments_feed.mdwn @@ -0,0 +1,10 @@ +You've enabled the [[plugins/comments]] plugin, so a set of pages on your +blog can have comments added to them. Pages with comments even have special +feeds that can be used to subscribe to those comments. But you'd like to +add a feed that contains all the comments posted to any page. Here's how: + + \[[!inline pages="internal(*/comment_*)" template=comment]] + +The special [[ikiwiki/PageSpec]] matches all comments. The +[[template|wikitemplates]] causes the comments to be displayed formatted +nicely. diff --git a/doc/tips/convert_mediawiki_to_ikiwiki/discussion.mdwn b/doc/tips/convert_mediawiki_to_ikiwiki/discussion.mdwn index 6e5f1668a..15ddccb92 100644 --- a/doc/tips/convert_mediawiki_to_ikiwiki/discussion.mdwn +++ b/doc/tips/convert_mediawiki_to_ikiwiki/discussion.mdwn @@ -11,4 +11,602 @@ there are some variations on the approach that might be useful: Also, some detail on converting mediawiki transclusion to ikiwiki inlines... --- [[JonDowland]] +-- [[users/Jon]] + +> "Who knows, the remote site might disappear.". Right now, it appears to +> have done just that. -- [[users/Jon]] + + +The iki-fast-load ruby script from the u32 page is given below: + + #!/usr/bin/env ruby + + # This script is called on the final sorted, de-spammed revision + # XML file. + # + # It doesn't currently check for no-op revisions... I believe + # that git-fast-load will dutifully load them even though nothing + # happened. I don't care to solve this by adding a file cache + # to this script. You can run iki-diff-next.rb to highlight any + # empty revisions that need to be removed. + # + # This turns each node into an equivalent file. + # It does not convert spaces to underscores in file names. + # This would break wikilinks. + # I suppose you could fix this with mod_speling or mod_rewrite. + # + # It replaces nodes in the Image: namespace with the files themselves. 
+ + + require 'rubygems' + require 'node-callback' + require 'time' + require 'ostruct' + + + # pipe is the stream to receive the git-fast-import commands + # putfrom is true if this branch has existing commits on it, false if not. + def format_git_commit(pipe, f) + # Need to escape backslashes and double-quotes for git? + # No, git breaks when I do this. + # For the filename "path with \\", git sez: bad default revision 'HEAD' + # filename = '"' + filename.gsub('\\', '\\\\\\\\').gsub('"', '\\"') + '"' + + # In the calls below, length must be the size in bytes!! + # TODO: I haven't figured out how this works in the land of UTF8 and Ruby 1.9. + pipe.puts "commit #{f.branch}" + pipe.puts "committer #{f.username} <#{f.email}> #{f.timestamp.rfc2822}" + pipe.puts "data #{f.message.length}\n#{f.message}\n" + pipe.puts "from #{f.branch}^0" if f.putfrom + pipe.puts "M 644 inline #{f.filename}" + pipe.puts "data #{f.content.length}\n#{f.content}\n" + pipe.puts + end + + +Mediawiki.pm - A plugin which supports mediawiki format. + + #!/usr/bin/perl + # By Scott Bronson. Licensed under the GPLv2+ License. + # Extends Ikiwiki to be able to handle Mediawiki markup. + # + # To use the Mediawiki Plugin: + # - Install Text::MediawikiFormat + # - Turn of prefix_directives in your setup file. + # (TODO: we probably don't need to do this anymore?) + # prefix_directives => 1, + # - Add this plugin on Ikiwiki's path (perl -V, look for @INC) + # cp mediawiki.pm something/IkiWiki/Plugin + # - And enable it in your setup file + # add_plugins => [qw{mediawiki}], + # - Finally, turn off the link plugin in setup (this is important) + # disable_plugins => [qw{link}], + # - Rebuild everything (actually, this should be automatic right?) + # - Now all files with a .mediawiki extension should be rendered properly. + + + package IkiWiki::Plugin::mediawiki; + + use warnings; + use strict; + use IkiWiki 2.00; + use URI; + + + # This is a gross hack... We disable the link plugin so that our + # linkify routine is always called. Then we call the link plugin + # directly for all non-mediawiki pages. Ouch... Hopefully Ikiwiki + # will be updated soon to support multiple link plugins. + require IkiWiki::Plugin::link; + + # Even if T:MwF is not installed, we can still handle all the linking. + # The user will just see Mediawiki markup rather than formatted markup. + eval q{use Text::MediawikiFormat ()}; + my $markup_disabled = $@; + + # Work around a UTF8 bug in Text::MediawikiFormat + # http://rt.cpan.org/Public/Bug/Display.html?id=26880 + unless($markup_disabled) { + no strict 'refs'; + no warnings; + *{'Text::MediawikiFormat::uri_escape'} = \&URI::Escape::uri_escape_utf8; + } + + my %metaheaders; # keeps track of redirects for pagetemplate. + my %tags; # keeps track of tags for pagetemplate. + + + sub import { #{{{ + hook(type => "checkconfig", id => "mediawiki", call => \&checkconfig); + hook(type => "scan", id => "mediawiki", call => \&scan); + hook(type => "linkify", id => "mediawiki", call => \&linkify); + hook(type => "htmlize", id => "mediawiki", call => \&htmlize); + hook(type => "pagetemplate", id => "mediawiki", call => \&pagetemplate); + } # }}} + + + sub checkconfig + { + return IkiWiki::Plugin::link::checkconfig(@_); + } + + + my $link_regexp = qr{ + \[\[(?=[^!]) # beginning of link + ([^\n\r\]#|<>]+) # 1: page to link to + (?: + \# # '#', beginning of anchor + ([^|\]]+) # 2: anchor text + )? # optional + + (?: + \| # followed by '|' + ([^\]\|]*) # 3: link text + )? 
# optional + \]\] # end of link + ([a-zA-Z]*) # optional trailing alphas + }x; + + + # Convert spaces in the passed-in string into underscores. + # If passed in undef, returns undef without throwing errors. + sub underscorize + { + my $var = shift; + $var =~ tr{ }{_} if $var; + return $var; + } + + + # Underscorize, strip leading and trailing space, and scrunch + # multiple runs of spaces into one underscore. + sub scrunch + { + my $var = shift; + if($var) { + $var =~ s/^\s+|\s+$//g; # strip leading and trailing space + $var =~ s/\s+/ /g; # squash multiple spaces to one + } + return $var; + } + + + # Translates Mediawiki paths into Ikiwiki paths. + # It needs to be pretty careful because Mediawiki and Ikiwiki handle + # relative vs. absolute exactly opposite from each other. + sub translate_path + { + my $page = shift; + my $path = scrunch(shift); + + # always start from root unless we're doing relative shenanigans. + $page = "/" unless $path =~ /^(?:\/|\.\.)/; + + my @result = (); + for(split(/\//, "$page/$path")) { + if($_ eq '..') { + pop @result; + } else { + push @result, $_ if $_ ne ""; + } + } + + # temporary hack working around http://ikiwiki.info/bugs/Can__39__t_create_root_page/index.html?updated + # put this back the way it was once this bug is fixed upstream. + # This is actually a major problem because now Mediawiki pages can't link from /Git/git-svn to /git-svn. And upstream appears to be uninterested in fixing this bug. :( + # return "/" . join("/", @result); + return join("/", @result); + } + + + # Figures out the human-readable text for a wikilink + sub linktext + { + my($page, $inlink, $anchor, $title, $trailing) = @_; + my $link = translate_path($page,$inlink); + + # translate_path always produces an absolute link. + # get rid of the leading slash before we display this link. + $link =~ s#^/##; + + my $out = ""; + if($title) { + $out = IkiWiki::pagetitle($title); + } else { + $link = $inlink if $inlink =~ /^\s*\//; + $out = $anchor ? "$link#$anchor" : $link; + if(defined $title && $title eq "") { + # a bare pipe appeared in the link... + # user wants to strip namespace and trailing parens. + $out =~ s/^[A-Za-z0-9_-]*://; + $out =~ s/\s*\(.*\)\s*$//; + } + # A trailing slash suppresses the leading slash + $out =~ s#^/(.*)/$#$1#; + } + $out .= $trailing if defined $trailing; + return $out; + } + + + sub tagpage ($) + { + my $tag=shift; + + if (exists $config{tagbase} && defined $config{tagbase}) { + $tag=$config{tagbase}."/".$tag; + } + + return $tag; + } + + + # Pass a URL and optional text associated with it. This call turns + # it into fully-formatted HTML the same way Mediawiki would. + # Counter is used to number untitled links sequentially on the page. + # It should be set to 1 when you start parsing a new page. This call + # increments it automatically. + sub generate_external_link + { + my $url = shift; + my $text = shift; + my $counter = shift; + + # Mediawiki trims off trailing commas. + # And apparently it does entity substitution first. + # Since we can't, we'll fake it. + + # trim any leading and trailing whitespace + $url =~ s/^\s+|\s+$//g; + + # url properly terminates on > but must special-case > + my $trailer = ""; + $url =~ s{(\&(?:gt|lt)\;.*)$}{ $trailer = $1, ''; }eg; + + # Trim some potential trailing chars, put them outside the link. + my $tmptrail = ""; + $url =~ s{([,)]+)$}{ $tmptrail .= $1, ''; }eg; + $trailer = $tmptrail . 
$trailer; + + my $title = $url; + if(defined $text) { + if($text eq "") { + $text = "[$$counter]"; + $$counter += 1; + } + $text =~ s/^\s+|\s+$//g; + $text =~ s/^\|//; + } else { + $text = $url; + } + + return "<a href='$url' title='$title'>$text</a>$trailer"; + } + + + # Called to handle bookmarks like [[#heading]] or <span class="createlink"><a href="http://u32.net/cgi-bin/ikiwiki.cgi?page=%20text%20&from=Mediawiki_Plugin%2Fmediawiki&do=create" rel="nofollow">?</a>#a</span> + sub generate_fragment_link + { + my $url = shift; + my $text = shift; + + my $inurl = $url; + my $intext = $text; + $url = scrunch($url); + + if(defined($text) && $text ne "") { + $text = scrunch($text); + } else { + $text = $url; + } + + $url = underscorize($url); + + # For some reason Mediawiki puts blank titles on all its fragment links. + # I don't see why we would duplicate that behavior here. + return "<a href='$url'>$text</a>"; + } + + + sub generate_internal_link + { + my($page, $inlink, $anchor, $title, $trailing, $proc) = @_; + + # Ikiwiki's link link plugin wrecks this line when displaying on the site. + # Until the code highlighter plugin can turn off link finding, + # always escape double brackets in double quotes: [[ + if($inlink eq '..') { + # Mediawiki doesn't touch links like [[..#hi|ho]]. + return "[[" . $inlink . ($anchor?"#$anchor":"") . + ($title?"|$title":"") . "]]" . $trailing; + } + + my($linkpage, $linktext); + if($inlink =~ /^ (:?) \s* Category (\s* \: \s*) ([^\]]*) $/x) { + # Handle category links + my $sep = $2; + $inlink = $3; + $linkpage = IkiWiki::linkpage(translate_path($page, $inlink)); + if($1) { + # Produce a link but don't add this page to the given category. + $linkpage = tagpage($linkpage); + $linktext = ($title ? '' : "Category$sep") . + linktext($page, $inlink, $anchor, $title, $trailing); + $tags{$page}{$linkpage} = 1; + } else { + # Add this page to the given category but don't produce a link. + $tags{$page}{$linkpage} = 1; + &$proc(tagpage($linkpage), $linktext, $anchor); + return ""; + } + } else { + # It's just a regular link + $linkpage = IkiWiki::linkpage(translate_path($page, $inlink)); + $linktext = linktext($page, $inlink, $anchor, $title, $trailing); + } + + return &$proc($linkpage, $linktext, $anchor); + } + + + sub check_redirect + { + my %params=@_; + + my $page=$params{page}; + my $destpage=$params{destpage}; + my $content=$params{content}; + + return "" if $page ne $destpage; + + if($content !~ /^ \s* \#REDIRECT \s* \[\[ ( [^\]]+ ) \]\]/x) { + # this page isn't a redirect, render it normally. + return undef; + } + + # The rest of this function is copied from the redir clause + # in meta::preprocess and actually handles the redirect. + + my $value = $1; + $value =~ s/^\s+|\s+$//g; + + my $safe=0; + if ($value !~ /^\w+:\/\//) { + # it's a local link + my ($redir_page, $redir_anchor) = split /\#/, $value; + + add_depends($page, $redir_page); + my $link=bestlink($page, underscorize(translate_path($page,$redir_page))); + if (! 
length $link) { + return "<b>Redirect Error:</b> <nowiki>[[$redir_page]] not found.</nowiki>"; + } + + $value=urlto($link, $page); + $value.='#'.$redir_anchor if defined $redir_anchor; + $safe=1; + + # redir cycle detection + $pagestate{$page}{mediawiki}{redir}=$link; + my $at=$page; + my %seen; + while (exists $pagestate{$at}{mediawiki}{redir}) { + if ($seen{$at}) { + return "<b>Redirect Error:</b> cycle found on <nowiki>[[$at]]</nowiki>"; + } + $seen{$at}=1; + $at=$pagestate{$at}{mediawiki}{redir}; + } + } else { + # it's an external link + $value = encode_entities($value); + } + + my $redir="<meta http-equiv=\"refresh\" content=\"0; URL=$value\" />"; + $redir=scrub($redir) if !$safe; + push @{$metaheaders{$page}}, $redir; + + return "Redirecting to $value ..."; + } + + + # Feed this routine a string containing <nowiki>...</nowiki> sections, + # this routine calls your callback for every section not within nowikis, + # collecting its return values and returning the rewritten string. + sub skip_nowiki + { + my $content = shift; + my $proc = shift; + + my $result = ""; + my $state = 0; + + for(split(/(<nowiki[^>]*>.*?<\/nowiki\s*>)/s, $content)) { + $result .= ($state ? $_ : &$proc($_)); + $state = !$state; + } + + return $result; + } + + + # Converts all links in the page, wiki and otherwise. + sub linkify (@) + { + my %params=@_; + + my $page=$params{page}; + my $destpage=$params{destpage}; + my $content=$params{content}; + + my $file=$pagesources{$page}; + my $type=pagetype($file); + my $counter = 1; + + if($type ne 'mediawiki') { + return IkiWiki::Plugin::link::linkify(@_); + } + + my $redir = check_redirect(%params); + return $redir if defined $redir; + + # this code was copied from MediawikiFormat.pm. + # Heavily changed because MF.pm screws up escaping when it does + # this awful hack: $uricCheat =~ tr/://d; + my $schemas = [qw(http https ftp mailto gopher)]; + my $re = join "|", map {qr/\Q$_\E/} @$schemas; + my $schemes = qr/(?:$re)/; + # And this is copied from URI: + my $reserved = q(;/?@&=+$,); # NOTE: no colon or [] ! + my $uric = quotemeta($reserved) . $URI::unreserved . "%#"; + + my $result = skip_nowiki($content, sub { + $_ = shift; + + # Escape any anchors + #s/<(a[\s>\/])/<$1/ig; + # Disabled because this appears to screw up the aggregate plugin. + # I guess we'll rely on Iki to post-sanitize this sort of stuff. + + # Replace external links, http://blah or [http://blah] + s{\b($schemes:[$uric][:$uric]+)|\[($schemes:[$uric][:$uric]+)([^\]]*?)\]}{ + generate_external_link($1||$2, $3, \$counter) + }eg; + + # Handle links that only contain fragments. + s{ \[\[ \s* (\#[^|\]'"<>&;]+) (?:\| ([^\]'"<>&;]*))? \]\] }{ + generate_fragment_link($1, $2) + }xeg; + + # Match all internal links + s{$link_regexp}{ + generate_internal_link($page, $1, $2, $3, $4, sub { + my($linkpage, $linktext, $anchor) = @_; + return htmllink($page, $destpage, $linkpage, + linktext => $linktext, + anchor => underscorize(scrunch($anchor))); + }); + }eg; + + return $_; + }); + + return $result; + } + + + # Find all WikiLinks in the page. 
+ sub scan (@) + { + my %params = @_; + my $page=$params{page}; + my $content=$params{content}; + + my $file=$pagesources{$page}; + my $type=pagetype($file); + + if($type ne 'mediawiki') { + return IkiWiki::Plugin::link::scan(@_); + } + + skip_nowiki($content, sub { + $_ = shift; + while(/$link_regexp/g) { + generate_internal_link($page, $1, '', '', '', sub { + my($linkpage, $linktext, $anchor) = @_; + push @{$links{$page}}, $linkpage; + return undef; + }); + } + return ''; + }); + } + + + # Convert the page to HTML. + sub htmlize (@) + { + my %params=@_; + my $page = $params{page}; + my $content = $params{content}; + + + return $content if $markup_disabled; + + # Do a little preprocessing to babysit Text::MediawikiFormat + # If a line begins with tabs, T:MwF won't convert it into preformatted blocks. + $content =~ s/^\t/ /mg; + + my $ret = Text::MediawikiFormat::format($content, { + + allowed_tags => [#HTML + # MediawikiFormat default + qw(b big blockquote br caption center cite code dd + div dl dt em font h1 h2 h3 h4 h5 h6 hr i li ol p + pre rb rp rt ruby s samp small strike strong sub + sup table td th tr tt u ul var), + # Mediawiki Specific + qw(nowiki), + # Our additions + qw(del ins), # These should have been added all along. + qw(span), # Mediawiki allows span but that's rather scary...? + qw(a), # this is unfortunate; should handle links after rendering the page. + ], + + allowed_attrs => [ + qw(title align lang dir width height bgcolor), + qw(clear), # BR + qw(noshade), # HR + qw(cite), # BLOCKQUOTE, Q + qw(size face color), # FONT + # For various lists, mostly deprecated but safe + qw(type start value compact), + # Tables + qw(summary width border frame rules cellspacing + cellpadding valign char charoff colgroup col + span abbr axis headers scope rowspan colspan), + qw(id class name style), # For CSS + # Our additions + qw(href), + ], + + }, { + extended => 0, + absolute_links => 0, + implicit_links => 0 + }); + + return $ret; + } + + + # This is only needed to support the check_redirect call. + sub pagetemplate (@) + { + my %params = @_; + my $page = $params{page}; + my $destpage = $params{destpage}; + my $template = $params{template}; + + # handle metaheaders for redirects + if (exists $metaheaders{$page} && $template->query(name => "meta")) { + # avoid duplicate meta lines + my %seen; + $template->param(meta => join("\n", grep { (! $seen{$_}) && ($seen{$_}=1) } @{$metaheaders{$page}})); + } + + $template->param(tags => [ + map { + link => htmllink($page, $destpage, tagpage($_), rel => "tag") + }, sort keys %{$tags{$page}} + ]) if exists $tags{$page} && %{$tags{$page}} && $template->query(name => "tags"); + + # It's an rss/atom template. Add any categories. + if ($template->query(name => "categories")) { + if (exists $tags{$page} && %{$tags{$page}}) { + $template->param(categories => [map { category => $_ }, + sort keys %{$tags{$page}}]); + } + } + } + + 1 diff --git a/doc/tips/inside_dot_ikiwiki.mdwn b/doc/tips/inside_dot_ikiwiki.mdwn index 1f76ce4bd..b81ffae8d 100644 --- a/doc/tips/inside_dot_ikiwiki.mdwn +++ b/doc/tips/inside_dot_ikiwiki.mdwn @@ -66,7 +66,7 @@ to do it rarely, and the data I've wanted has been different each time. ## the session database -`.ikiwiki/sessions.db` is the session database. See the [[cpan CGI::Session]] +`.ikiwiki/sessions.db` is the session database. See the [[!cpan CGI::Session]] documentation for more details. 
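One hedged way to peek inside it — only a sketch, assuming ikiwiki's usual db_file session driver and [[!cpan CGI::Session]]'s `find()` and `dump()` methods, run from the directory containing `.ikiwiki`:

    perl -MCGI::Session -le 'CGI::Session->find("driver:db_file", sub { print shift->dump }, { FileName => ".ikiwiki/sessions.db" })'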
## lockfiles diff --git a/doc/tips/inside_dot_ikiwiki/discussion.mdwn b/doc/tips/inside_dot_ikiwiki/discussion.mdwn index c05e7a3e0..34d5b9252 100644 --- a/doc/tips/inside_dot_ikiwiki/discussion.mdwn +++ b/doc/tips/inside_dot_ikiwiki/discussion.mdwn @@ -16,3 +16,50 @@ No idea how this happened. I've blown it away and recreated it but, for future >>> --getctime does. --[[Joey]] >> Alas, I seem to have lost the bad index file to periodic /tmp wiping; I'll send it to you if it happens again. --[[sabr]] + +<!-- Add by Blanko --> + +## Lost password for an user + +This morning, a person has lost its password. I was able to do something to make another password. This is the way I take : + +> You can certianly do that, but do note that ikiwiki will offer to mail a +> user a password reset link if they lost their password. --[[Joey]] + +### Locate the user database + +As tips show us, the user database is in the source file, for an example : + + src/.ikiwiki/userdb + +### See which user to modify + +Because I don't know the real login of the user, I have to read all the database : + + perl -le 'use Storable; my $index=Storable::retrieve("userdb"); use Data::Dumper; print Dumper $index' + +Then I was able to find this : + + 'Utilisateur' => { + 'email' => 'user@pl.fr', + 'cryptresettoken' => '$2a$10$cfVeOoVbFw9VzMlgEbPMsu34pwHIFP84mWlkrs2RCKknZYPZkPffm', + 'password' => '', + 'resettoken' => '', + 'cryptpassword' => '$2a$10$H8bYq.dlb68wpnfJgVZQhOdsF9JQ06cteRfhPQPB5eHKnD5Y3u7au', + 'regdate' => '1226574052' + }, + +Let's have a look to modify lines. + +### Modify the line + +When you have found the line to modify, take the user name, and change its password to **sc** (for an example) : + + perl -le 'use Storable; my $userinfo=Storable::retrieve("userdb"); $userinfo->{"Utilisateur"}->{cryptpassword}=q{$2a$10$7viOHCrUkdAVL135Kr6one1mpZQ/FWYC773G1yZ0EtQciI11sSDRS}; Storable::lock_nstore($userinfo, "userdb")' + perl -le 'use Storable; my $userinfo=Storable::retrieve("userdb"); $userinfo->{"Utilisateur"}->{cryptresettoken}=q{}; Storable::lock_nstore($userinfo, "userdb")' + +Because I don't know how suppress cryptresettoken and resettoken fields, I change their content with *null*. + +After all these modifications, the user *Utilisateur* could connect to its account with the password **sc**, and go to Preferences, then change its password. + +<!-- End of Blanko's modifications --> diff --git a/doc/tips/nearlyfreespeech.mdwn b/doc/tips/nearlyfreespeech.mdwn index 6715f0c29..4b3b02eac 100644 --- a/doc/tips/nearlyfreespeech.mdwn +++ b/doc/tips/nearlyfreespeech.mdwn @@ -81,7 +81,8 @@ Here is an example of how I set up a wiki: nano ikiwiki.setup # Set destdir to /home/htdocs # Set srcdir to /home/private/wiki - # Set url to http://yoursite.nfshost.com/ , set cgiurl likewise + # Set url to http://yoursite.nfshost.com/ + # Set cgiurl to http://yoursite.nfshost.com/ikiwiki.cgi # Uncomment the `rcs => "git"` line. # Set the cgi_wrapper path to /home/htdocs/ikiwiki.cgi # Set the git_wrapper path to /home/private/wiki.git/hooks/post-update diff --git a/doc/tips/untrusted_git_push/discussion.mdwn b/doc/tips/untrusted_git_push/discussion.mdwn index e85625a1b..d95c01ecf 100644 --- a/doc/tips/untrusted_git_push/discussion.mdwn +++ b/doc/tips/untrusted_git_push/discussion.mdwn @@ -24,10 +24,10 @@ Note that the user for the commit is 'jon', and the link points at cgi to create users/jon. I was wondering if that is configurable for users pushing via git. 
It would be nice perhaps to specify it in some way, perhaps via a git-config setting (user.name?). I'm not too familiar with exactly what the -changeset contains. -- [[JonDowland]] +changeset contains. -- [[users/Jon]] > All ikiwiki can do is look at who git has recorded as the author of > the change (and it looks at the username part of the email address). > You can set `user.email` in `.git/config`. --[[Joey]] -> > Ah, excellent. In which case this *should* DTRT... -- [[JonDowland]] +> > Ah, excellent. In which case this *should* DTRT... -- [[users/Jon]] diff --git a/doc/tips/using_the_web_interface_with_a_real_text_editor.mdwn b/doc/tips/using_the_web_interface_with_a_real_text_editor.mdwn index d696bacdb..cf9327395 100644 --- a/doc/tips/using_the_web_interface_with_a_real_text_editor.mdwn +++ b/doc/tips/using_the_web_interface_with_a_real_text_editor.mdwn @@ -4,3 +4,10 @@ you to use a real text editor like Emacs or Vim to edit the contents of text areas. This allows you to edit ikiwiki pages with a real text editor through the ikiwiki web interface, rather than only with direct commit access. --[[JoshTriplett]] + +For Firefox or Iceweasel users, the vimperator extension is also a good +idea. You can press Ctrl-I in the insert mode of vimperator and switch to +an external editor, e.g. Vim. --[[WeakishJiang]] + +Finally, with wikis configured to allow [[untrusted_git_push]], you can +ditch the browser altogether. --[[Joey]] diff --git a/doc/tips/vim_syntax_highlighting/discussion.mdwn b/doc/tips/vim_syntax_highlighting/discussion.mdwn index 038854b9f..72cb52aab 100644 --- a/doc/tips/vim_syntax_highlighting/discussion.mdwn +++ b/doc/tips/vim_syntax_highlighting/discussion.mdwn @@ -1,4 +1,4 @@ -I'm going to look at merging this with potwiki.vim (a vim-based personal wiki) so that you can follow wiki-links and auto-create pages etc., direct from vim. (I'm writing this in case I don't get around to it) -- [[JonDowland]] +I'm going to look at merging this with potwiki.vim (a vim-based personal wiki) so that you can follow wiki-links and auto-create pages etc., direct from vim. (I'm writing this in case I don't get around to it) -- [[users/Jon]] ---- diff --git a/doc/todo/Add_DATE_parameter_for_use_in_templates.mdwn b/doc/todo/Add_DATE_parameter_for_use_in_templates.mdwn index c908f57c8..8ecdf36d0 100644 --- a/doc/todo/Add_DATE_parameter_for_use_in_templates.mdwn +++ b/doc/todo/Add_DATE_parameter_for_use_in_templates.mdwn @@ -44,7 +44,7 @@ regenerate this one against that). %config %links %renderedfiles %pagesources %destsources); our $VERSION = 2.00; # plugin interface version, next is ikiwiki version our $version="2.1";my $installdir="/usr"; - @@ -70,6 +70,7 @@ sub defaultconfig () { #{{{ + @@ -70,6 +70,7 @@ sub defaultconfig () { plugin => [qw{mdwn inline htmlscrubber passwordauth openid signinedit lockedit conditional}], timeformat => '%c', @@ -52,27 +52,27 @@ regenerate this one against that).
locale => undef, sslcookie => 0, httpauth => 0, - @@ -447,6 +448,15 @@ sub displaytime ($) { #{{{ + @@ -447,6 +448,15 @@ sub displaytime ($) { $config{timeformat}, localtime($time))); - } #}}} + } - +sub displaydate ($) { #{{{ + +sub displaydate ($) { + my $time=shift; + + # strftime doesn't know about encodings, so make sure + # its output is properly treated as utf8 + return decode_utf8(POSIX::strftime( + $config{dateformat}, localtime($time))); - +} #}}} + +} + - sub beautify_url ($) { #{{{ + sub beautify_url ($) { my $url=shift; diff --git a/Plugin/inline.pm b/Plugin/inline.pm index 8f6ab51..7bd6147 100644 --- a/Plugin/inline.pm +++ b/Plugin/inline.pm - @@ -148,6 +148,7 @@ sub preprocess_inline (@) { #{{{ + @@ -148,6 +148,7 @@ sub preprocess_inline (@) { $template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage})); $template->param(title => pagetitle(basename($page))); $template->param(ctime => displaytime($pagectime{$page})); diff --git a/doc/todo/Add_camelcase_exclusions.mdwn b/doc/todo/Add_camelcase_exclusions.mdwn new file mode 100644 index 000000000..6b86132a0 --- /dev/null +++ b/doc/todo/Add_camelcase_exclusions.mdwn @@ -0,0 +1,23 @@ +Camelcase currently looks for any and call camelcase words and turns them into wiki links. This patch adds a config item called <code>camelcase_ignore</code> which is an array of camelcase words to ignore. + +<pre> +--- /usr/share/perl5/IkiWiki/Plugin/camelcase.pm.orig 2008-12-24 11:49:14.000000000 +1300 ++++ /usr/share/perl5/IkiWiki/Plugin/camelcase.pm 2008-12-24 12:02:21.000000000 +1300 +@@ -33,7 +33,11 @@ + my $destpage=$params{destpage}; + + $params{content}=~s{$link_regexp}{ +- htmllink($page, $destpage, IkiWiki::linkpage($1)) ++ if (grep {/$1/} @{ $config{'camelcase_ignore'} }) { ++ $1 ++ } else { ++ htmllink($page, $destpage, IkiWiki::linkpage($1)) ++ } + }eg; + + return $params{content}; +</pre> + +--[[puck]] + +[[done]] diff --git a/doc/todo/Add_support_for_latest_Text::Markdown_as_found_on_CPAN.mdwn b/doc/todo/Add_support_for_latest_Text::Markdown_as_found_on_CPAN.mdwn index 222cd8c46..6b9fa0535 100644 --- a/doc/todo/Add_support_for_latest_Text::Markdown_as_found_on_CPAN.mdwn +++ b/doc/todo/Add_support_for_latest_Text::Markdown_as_found_on_CPAN.mdwn @@ -12,7 +12,7 @@ This patch allows IkiWiki to work with either of the two: --- IkiWiki/Plugin/mdwn.pm.orig 2008-03-08 11:33:50.000000000 +0100 +++ IkiWiki/Plugin/mdwn.pm 2008-03-08 13:37:21.000000000 +0100 - @@ -28,14 +28,20 @@ sub htmlize (@) { #{{{ + @@ -28,14 +28,20 @@ sub htmlize (@) { $markdown_sub=\&Markdown::Markdown; } else { diff --git a/doc/todo/Allow_change_of_wiki_file_types.mdwn b/doc/todo/Allow_change_of_wiki_file_types.mdwn index 8a398f2e0..19574b175 100644 --- a/doc/todo/Allow_change_of_wiki_file_types.mdwn +++ b/doc/todo/Allow_change_of_wiki_file_types.mdwn @@ -12,7 +12,7 @@ I was hoping that the [[plugins/rename]] plugin would allow web uses to change t index 527ee88..123b772 100644 --- a/IkiWiki/Plugin/rename.pm +++ b/IkiWiki/Plugin/rename.pm - @@ -43,7 +43,7 @@ sub check_canrename ($$$$$$$) { #{{{ + @@ -43,7 +43,7 @@ sub check_canrename ($$$$$$$) { # Dest checks can be omitted by passing undef. if (defined $dest) { @@ -21,7 +21,7 @@ I was hoping that the [[plugins/rename]] plugin would allow web uses to change t error(gettext("no change to the file name was specified")); } - @@ -54,7 +54,7 @@ sub check_canrename ($$$$$$$) { #{{{ + @@ -54,7 +54,7 @@ sub check_canrename ($$$$$$$) { } # Must not be a known source file. 
@@ -30,7 +30,7 @@ I was hoping that the [[plugins/rename]] plugin would allow web uses to change t error(sprintf(gettext("%s already exists"), htmllink("", "", $dest, noimageinline => 1))); } - @@ -97,6 +97,24 @@ sub rename_form ($$$) { #{{{ + @@ -97,6 +97,24 @@ sub rename_form ($$$) { $f->field(name => "do", type => "hidden", value => "rename", force => 1); $f->field(name => "page", type => "hidden", value => $page, force => 1); $f->field(name => "new_name", value => IkiWiki::pagetitle($page), size => 60); @@ -55,7 +55,7 @@ I was hoping that the [[plugins/rename]] plugin would allow web uses to change t $f->field(name => "attachment", type => "hidden"); return $f, ["Rename", "Cancel"]; - @@ -223,12 +241,19 @@ sub sessioncgi ($$) { #{{{ + @@ -223,12 +241,19 @@ sub sessioncgi ($$) { my $dest=IkiWiki::possibly_foolish_untaint(IkiWiki::titlepage($q->param("new_name"))); # The extension of dest is the same as src if it's diff --git a/doc/todo/Allow_disabling_edit_and_preferences_links.mdwn b/doc/todo/Allow_disabling_edit_and_preferences_links.mdwn new file mode 100644 index 000000000..1188d1ab2 --- /dev/null +++ b/doc/todo/Allow_disabling_edit_and_preferences_links.mdwn @@ -0,0 +1,48 @@ +This patch allows disabling the edit and preferences links in the config file. It is backwards compatible (so people's edit and preferences links won't suddenly vanish). + +To disable edit or prefs respectively, add the following to the config file: + +<pre> + 'edit' => 0, + 'prefs' => 0, +</pre> + +Patch: +<pre> +--- /usr/share/perl5/IkiWiki/Render.pm.orig 2008-12-23 16:49:00.000000000 +1300 ++++ /usr/share/perl5/IkiWiki/Render.pm 2008-12-23 16:55:40.000000000 +1300 +@@ -80,8 +80,10 @@ + my $actions=0; + + if (length $config{cgiurl}) { +- $template->param(editurl => cgiurl(do => "edit", page => $page)); +- $template->param(prefsurl => cgiurl(do => "prefs")); ++ $template->param(editurl => cgiurl(do => "edit", page => $page)) ++ if ! defined $config{edit} || (defined $config{edit} && $config{edit} == 1); ++ $template->param(prefsurl => cgiurl(do => "prefs")) ++ if ! defined $config{prefs} || (defined $config{prefs} && $config{prefs} == 1); + $actions++; + } + +</pre> + +> On irc, you said, "That was to allow the hack of using wikistatedir to +> allow me to generate two websites, one with inline editing, the other a +> static page for public consumption." +> +> The edit and preferences links can already be disabled by editing +> `page.tmpl`. (Look for PREFSURL and EDITURL). +> +> More to the point though, disabling those links does not stop anyone +> constructing the urls by hand and logging in and editing a page. So you'd +> really want to disable the editpage plugin in the setup file for the +> public, static wiki. Sounds like you might also want to turn off cgi +> entirely for that build. --[[Joey]] + +>> I want to retain the same page.tmpl for both sites (different templates +>> will just increase the maintenance hell), so disabling the links in the +>> config for one public site works better in my case. +>> +>> I do have the editpage plugin disabled for the public static wiki, but +>> the link still appears on the site. I want to keep the cgi on, so that +>> the site is still searchable. --[[puck]] diff --git a/doc/todo/Allow_edittemplate_to_set_file_type.mdwn b/doc/todo/Allow_edittemplate_to_set_file_type.mdwn index b49968c18..1b99a4e05 100644 --- a/doc/todo/Allow_edittemplate_to_set_file_type.mdwn +++ b/doc/todo/Allow_edittemplate_to_set_file_type.mdwn @@ -14,7 +14,7 @@ edittemplate there.
--[[Joey]] index 98308de..c381940 100644 --- a/IkiWiki/Plugin/edittemplate.pm +++ b/IkiWiki/Plugin/edittemplate.pm - @@ -56,8 +56,14 @@ sub preprocess (@) { #{{{ + @@ -56,8 +56,14 @@ sub preprocess (@) { $pagestate{$params{page}}{edittemplate}{$params{match}}=$params{template}; @@ -28,10 +28,10 @@ edittemplate there. --[[Joey]] + + return sprintf(gettext("edittemplate: %s registered for %s"), + $linkHTML, $params{match}); - } # }}} + } - sub formbuilder (@) { #{{{ - @@ -89,6 +95,9 @@ sub formbuilder (@) { #{{{ + sub formbuilder (@) { + @@ -89,6 +95,9 @@ sub formbuilder (@) { if (pagespec_match($p, $pagespec, location => $registering_page)) { $form->field(name => "editcontent", value => filltemplate($pagestate{$registering_page}{edittemplate}{$pagespec}, $page)); diff --git a/doc/todo/Bestdir_along_with_bestlink_in_IkiWiki.pm.mdwn b/doc/todo/Bestdir_along_with_bestlink_in_IkiWiki.pm.mdwn index 73157a326..95c38f794 100644 --- a/doc/todo/Bestdir_along_with_bestlink_in_IkiWiki.pm.mdwn +++ b/doc/todo/Bestdir_along_with_bestlink_in_IkiWiki.pm.mdwn @@ -8,9 +8,9 @@ This patch adds function bestdir() which returns best directory from the directo +++ IkiWiki.pm (working copy) @@ -391,6 +391,35 @@ return ""; - } #}}} + } - +sub bestdir ($$) { #{{{ + +sub bestdir ($$) { + my $page=shift; + my $link=shift; + my $cwd=$page; @@ -37,9 +37,9 @@ This patch adds function bestdir() which returns best directory from the directo + } + + return ""; - +} #}}} + +} + - sub isinlinableimage ($) { #{{{ + sub isinlinableimage ($) { my $file=shift; ---- diff --git a/doc/todo/Default_text_for_new_pages.mdwn b/doc/todo/Default_text_for_new_pages.mdwn index 4a17bbf8b..a904f8287 100644 --- a/doc/todo/Default_text_for_new_pages.mdwn +++ b/doc/todo/Default_text_for_new_pages.mdwn @@ -15,7 +15,7 @@ Inline below is a [[patch]] that implements this: index bb21ed2..10c985c 100644 --- a/IkiWiki/Plugin/editpage.pm +++ b/IkiWiki/Plugin/editpage.pm - @@ -60,7 +60,7 @@ sub cgi_editpage ($$) { #{{{ + @@ -60,7 +60,7 @@ sub cgi_editpage ($$) { decode_cgi_utf8($q); @@ -24,7 +24,7 @@ Inline below is a [[patch]] that implements this: my @buttons=("Save Page", "Preview", "Cancel"); eval q{use CGI::FormBuilder}; error($@) if $@; - @@ -117,9 +117,20 @@ sub cgi_editpage ($$) { #{{{ + @@ -117,9 +117,20 @@ sub cgi_editpage ($$) { } else { $type=$form->param('type'); @@ -45,7 +45,7 @@ Inline below is a [[patch]] that implements this: elsif (defined $from && exists $pagesources{$from}) { # favor the type of linking page $type=pagetype($pagesources{$from}); - @@ -129,7 +140,7 @@ sub cgi_editpage ($$) { #{{{ + @@ -129,7 +140,7 @@ sub cgi_editpage ($$) { if (! 
$form->submitted) { $form->field(name => "rcsinfo", value => "", force => 1); } @@ -58,7 +58,7 @@ Inline below is a [[patch]] that implements this: index 8efef3f..075d7d8 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm - @@ -271,6 +271,7 @@ sub preprocess_inline (@) { #{{{ + @@ -271,6 +271,7 @@ sub preprocess_inline (@) { $rootpage=$params{page}; } $formtemplate->param(rootpage => $rootpage); diff --git a/doc/todo/Give_access_to_more_TMPL__95__VAR_variables_in_templates_inserted_by_the_template_plugin.mdwn b/doc/todo/Give_access_to_more_TMPL__95__VAR_variables_in_templates_inserted_by_the_template_plugin.mdwn index a644e236b..c71250b3a 100644 --- a/doc/todo/Give_access_to_more_TMPL__95__VAR_variables_in_templates_inserted_by_the_template_plugin.mdwn +++ b/doc/todo/Give_access_to_more_TMPL__95__VAR_variables_in_templates_inserted_by_the_template_plugin.mdwn @@ -94,7 +94,7 @@ most possible of these pages. > index a6e34fc..bb9dd8d 100644 > --- a/IkiWiki/Plugin/template.pm > +++ b/IkiWiki/Plugin/template.pm -> @@ -57,6 +57,8 @@ sub preprocess (@) { #{{{ +> @@ -57,6 +57,8 @@ sub preprocess (@) { > } > } > diff --git a/doc/todo/Improve_display_of_OpenIDs.mdwn b/doc/todo/Improve_display_of_OpenIDs.mdwn new file mode 100644 index 000000000..e2ba1d90d --- /dev/null +++ b/doc/todo/Improve_display_of_OpenIDs.mdwn @@ -0,0 +1,5 @@ +Some OpenIDs seen in the IkiWiki git history are displayed poorly in [[RecentChanges]], including mine :-) (`http://smcv.pseudorandom.co.uk/`, shown as `smcv.pseudorandom [co.uk]`) + +My `openid` branch on <http://git.pseudorandom.co.uk/> improves on a couple of cases and adds a regression test. --[[smcv]] + +[[!tag patch done]] diff --git a/doc/todo/Inline_plugin_option_to_show_full_page_path.mdwn b/doc/todo/Inline_plugin_option_to_show_full_page_path.mdwn index 9f52a724a..691694009 100644 --- a/doc/todo/Inline_plugin_option_to_show_full_page_path.mdwn +++ b/doc/todo/Inline_plugin_option_to_show_full_page_path.mdwn @@ -19,7 +19,7 @@ Cheers, index 59eabb6..82913ba 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm - @@ -229,6 +229,7 @@ sub preprocess_inline (@) { #{{{ + @@ -229,6 +229,7 @@ sub preprocess_inline (@) { $template->param(content => $content); } $template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage})); diff --git a/doc/todo/Move_teximg_latex_preamble_to_config_file.mdwn b/doc/todo/Move_teximg_latex_preamble_to_config_file.mdwn index d94d24ee4..3cedd5ae3 100644 --- a/doc/todo/Move_teximg_latex_preamble_to_config_file.mdwn +++ b/doc/todo/Move_teximg_latex_preamble_to_config_file.mdwn @@ -71,10 +71,10 @@ Happy TeXing. + +my $default_postfix = '\\end{document}'; + - sub import { #{{{ + sub import { hook(type => "getsetup", id => "teximg", call => \&getsetup); hook(type => "preprocess", id => "teximg", call => \&preprocess); - @@ -21,6 +33,26 @@ sub getsetup () { #{{{ + @@ -21,6 +33,26 @@ sub getsetup () { safe => 1, rebuild => undef, }, @@ -98,10 +98,10 @@ Happy TeXing. + safe => 0, # Not sure how secure LaTeX is... 
+ rebuild => 1, + }, - } #}}} + } - sub preprocess (@) { #{{{ - @@ -105,25 +137,35 @@ sub gen_image ($$$$) { #{{{ + sub preprocess (@) { + @@ -105,25 +137,35 @@ sub gen_image ($$$$) { my $digest = shift; my $imagedir = shift; diff --git a/doc/todo/Set_arbitrary_date_to_be_used_by_calendar_plugin.mdwn b/doc/todo/Set_arbitrary_date_to_be_used_by_calendar_plugin.mdwn index a26433919..89167c084 100644 --- a/doc/todo/Set_arbitrary_date_to_be_used_by_calendar_plugin.mdwn +++ b/doc/todo/Set_arbitrary_date_to_be_used_by_calendar_plugin.mdwn @@ -42,13 +42,13 @@ Longer term plans: my %cache; my %linkcache; @@ -32,6 +34,7 @@ - sub import { #{{{ + sub import { hook(type => "needsbuild", id => "version", call => \&needsbuild); hook(type => "preprocess", id => "calendar", call => \&preprocess); + hook(type => "preprocess", id => "event", call => \&preprocess_event); - } #}}} + } - sub is_leap_year (@) { #{{{ + sub is_leap_year (@) { @@ -58,6 +61,7 @@ my $nmonth = $params{nmonth}; my $pyear = $params{pyear}; @@ -137,9 +137,9 @@ Longer term plans: # finish off the week @@ -304,6 +333,18 @@ return $calendar; - } #}}} + } - +sub preprocess_event (@) { #{{{ + +sub preprocess_event (@) { + my %params=@_; + # if now time is given, use now + $params{begin} = localtime($time) unless defined $params{begin}; @@ -151,7 +151,7 @@ Longer term plans: + return "<!-- $params{begin} -->"; +} #}} + - sub preprocess (@) { #{{{ + sub preprocess (@) { my %params=@_; $params{pages} = "*" unless defined $params{pages}; @@ -311,6 +352,8 @@ diff --git a/doc/todo/Silence_monotone_warning.mdwn b/doc/todo/Silence_monotone_warning.mdwn index e3f0224c2..d875900c5 100644 --- a/doc/todo/Silence_monotone_warning.mdwn +++ b/doc/todo/Silence_monotone_warning.mdwn @@ -4,7 +4,7 @@ A quick [[patch]] to silence a [[rcs/monotone]] warning I started seeing: index 4b9be31..9d4e280 100644 --- a/IkiWiki/Plugin/monotone.pm +++ b/IkiWiki/Plugin/monotone.pm - @@ -55,7 +55,7 @@ sub checkconfig () { #{{{ + @@ -55,7 +55,7 @@ sub checkconfig () { error("Monotone version too old, is $version but required 0.38"); } diff --git a/doc/todo/Support_wildcard_inside_of_link__40____41___within_a_pagespec.mdwn b/doc/todo/Support_wildcard_inside_of_link__40____41___within_a_pagespec.mdwn index 2837634d9..8320f72a6 100644 --- a/doc/todo/Support_wildcard_inside_of_link__40____41___within_a_pagespec.mdwn +++ b/doc/todo/Support_wildcard_inside_of_link__40____41___within_a_pagespec.mdwn @@ -20,7 +20,7 @@ That doesn't work in ikiwiki 2.1, but I have it index 38aa46a..cd42e8d 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -1082,10 +1082,15 @@ sub match_link ($$;@) { #{{{ + @@ -1082,10 +1082,15 @@ sub match_link ($$;@) { my $links = $IkiWiki::links{$page} or return undef; return IkiWiki::FailReason->new("$page has no links") unless @$links; my $bestlink = IkiWiki::bestlink($from, $link); @@ -38,7 +38,7 @@ That doesn't work in ikiwiki 2.1, but I have it + } } return IkiWiki::FailReason->new("$page does not link to $link"); - } #}}} + } -- 1.5.1.1.g6aead diff --git a/doc/todo/Wrapper_config_with_multiline_regexp.mdwn b/doc/todo/Wrapper_config_with_multiline_regexp.mdwn index c0311bc92..7b4323de1 100644 --- a/doc/todo/Wrapper_config_with_multiline_regexp.mdwn +++ b/doc/todo/Wrapper_config_with_multiline_regexp.mdwn @@ -13,12 +13,12 @@ Second, the untainting of $configstring should allow newlines. 
+++ wiki-meta/perl/IkiWiki.pm Mon Jun 11 10:52:07 2007 @@ -205,7 +205,7 @@ - sub possibly_foolish_untaint ($) { #{{{ + sub possibly_foolish_untaint ($) { my $tainted=shift; - my ($untainted)=$tainted=~/(.*)/; + my ($untainted)=$tainted=~/(.*)/s; return $untainted; - } #}}} + } Modified: wiki-meta/perl/IkiWiki/Wrapper.pm diff --git a/doc/todo/add_forward_age_sorting_option_to_inline.mdwn b/doc/todo/add_forward_age_sorting_option_to_inline.mdwn index 684419f90..e91c5a42f 100644 --- a/doc/todo/add_forward_age_sorting_option_to_inline.mdwn +++ b/doc/todo/add_forward_age_sorting_option_to_inline.mdwn @@ -19,7 +19,7 @@ diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm index d2e5832..9e52712 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm -@@ -194,6 +194,9 @@ sub preprocess_inline (@) { #{{{ +@@ -194,6 +194,9 @@ sub preprocess_inline (@) { elsif (! exists $params{sort} || $params{sort} eq 'age') { @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list; } diff --git a/doc/todo/apache_404_ErrorDocument_handler.mdwn b/doc/todo/apache_404_ErrorDocument_handler.mdwn new file mode 100644 index 000000000..b200ff3b2 --- /dev/null +++ b/doc/todo/apache_404_ErrorDocument_handler.mdwn @@ -0,0 +1,18 @@ +Apache's ErrorDocument directive lets you write a CGI script that will be invoked for all 404s. +IkiWiki could offer one as an optional wrapper; it would do much the same thing that the +existing recentchanges_link (or [[generic___39__do__61__goto__39___for_CGI]]) does when +encountering a nonexistent page. + +I think it'd probably have to be a separate CGI script because the environment with which +404 handlers are invoked is somewhat odd, and because it needs to return a 404 status +(having said that, it might make sense for `recentchanges_link` to return 404 rather than +200 anyway if the page doesn't exist). + +This would give IkiWiki the behaviour of many other wikis, where visiting a page that +does not yet exist prompts you to create it, without having to invoke the CGI for +successful requests. + +Due to [a well-known MSIE misfeature](http://support.microsoft.com/default.aspx?scid=kb;en-us;Q294807), +error output needs to be at least 512 bytes long, so some padding might also be required. + +I'm happy to write such a script if there is interest. --[[smcv]] diff --git a/doc/todo/automatic_use_of_syntax_plugin_on_source_code_files/discussion.mdwn b/doc/todo/automatic_use_of_syntax_plugin_on_source_code_files/discussion.mdwn index 467ec350e..dc6c0001e 100644 --- a/doc/todo/automatic_use_of_syntax_plugin_on_source_code_files/discussion.mdwn +++ b/doc/todo/automatic_use_of_syntax_plugin_on_source_code_files/discussion.mdwn @@ -17,13 +17,13 @@ Updated to use fix noted in [[bugs/multiple_pages_with_same_name]]. my %metaheaders; - sub import { #{{{ + sub import { hook(type => "getsetup", id => "sourcecode", call => \&getsetup); hook(type => "checkconfig", id => "sourcecode", call => \&checkconfig); hook(type => "pagetemplate", id => "sourcecode", call => \&pagetemplate); - } # }}} + } - sub getsetup () { #{{{ + sub getsetup () { return plugin => { safe => 1, @@ -57,9 +57,9 @@ Updated to use fix noted in [[bugs/multiple_pages_with_same_name]]. safe => 1, rebuild => 1, }, - } #}}} + } - sub checkconfig () { #{{{ + sub checkconfig () { if (! $config{sourcecode_lang}) { error("The sourcecode plugin requires a list of suffixes in the 'sourcecode_lang' config option"); } @@ -97,9 +97,9 @@ Updated to use fix noted in [[bugs/multiple_pages_with_same_name]]. 
error("Your installation of source-highlight cannot handle sourcecode language $lang!"); } } - } #}}} + } - sub htmlize (@) { #{{{ + sub htmlize (@) { my %params=@_; my $page = $params{page}; @@ -141,9 +141,9 @@ Updated to use fix noted in [[bugs/multiple_pages_with_same_name]]. } return '<div id="sourcecode">'."\r\n".join("\r\n",@html)."\r\n</div>\n"; - } # }}} + } - sub pagetemplate (@) { #{{{ + sub pagetemplate (@) { my %params=@_; my $page=$params{page}; @@ -154,6 +154,6 @@ Updated to use fix noted in [[bugs/multiple_pages_with_same_name]]. my %seen; $template->param(meta => join("\n", grep { (! $seen{$_}) && ($seen{$_}=1) } @{$metaheaders{$page}})); } - } # }}} + } 1 diff --git a/doc/todo/blogpost_plugin.mdwn b/doc/todo/blogpost_plugin.mdwn index 60b1e2515..bb91ffd02 100644 --- a/doc/todo/blogpost_plugin.mdwn +++ b/doc/todo/blogpost_plugin.mdwn @@ -51,13 +51,13 @@ Index: IkiWiki/Plugin/blogpost.pm +use POSIX; +use IkiWiki 2.00; + -+sub import { #{{{ ++sub import { + hook(type => "checkconfig", id => "blogpost", call => \&checkconfig); + hook(type => "authcgi", id => "blogpost", call => \&authcgi); + hook(type => "canedit", id => "blogpost", call => \&canedit); -+} # }}} ++} + -+sub checkconfig () { #{{{ ++sub checkconfig () { + if (! defined $config{blogformat}){ + $config{blogformat} = 'posts/%Y/%m/%d/$title'; + } @@ -72,9 +72,9 @@ Index: IkiWiki/Plugin/blogpost.pm + if (! defined $config{blogusers}) { + $config{blogusers} = (); # disallow all posting by default + } -+} #}}} ++} + -+sub authcgi ($$) { #{{{ ++sub authcgi ($$) { + my $cgi=shift; + my $session=shift; + @@ -115,16 +115,16 @@ Index: IkiWiki/Plugin/blogpost.pm + $cgi->param("page", $page); + } + -+} #}}} ++} + -+sub blogpage ($) { #{{{ ++sub blogpage ($) { + my $title=shift; + my $page=POSIX::strftime $config{blogformat}, localtime; + $page =~ s/\$title/$title/; + return $page; -+} #}}} ++} + -+sub canedit ($$$) { #{{{ ++sub canedit ($$$) { + my $page=shift; + my $cgi=shift; + my $session=shift; @@ -136,7 +136,7 @@ Index: IkiWiki/Plugin/blogpost.pm + return "" if ($config{blogusers} eq "*" || + grep {$_ eq $user} $config{blogusers}); + return ("not allowed to blog, $user"); -+} #}}} ++} + +1 Index: IkiWiki.pm diff --git a/doc/todo/bzr.mdwn b/doc/todo/bzr.mdwn index 179ea2f24..a50c58d26 100644 --- a/doc/todo/bzr.mdwn +++ b/doc/todo/bzr.mdwn @@ -56,15 +56,15 @@ and rcs_getctime and rcs_notify aren't written at all. --[[bma]] return @ret; } - sub rcs_update () { #{{{ + sub rcs_update () { # Not needed. - } #}}} + } - sub rcs_prepedit ($) { #{{{ + sub rcs_prepedit ($) { return ""; - } #}}} + } - sub rcs_commit ($$$;$$) { #{{{ + sub rcs_commit ($$$;$$) { my ($file, $message, $rcstoken, $user, $ipaddr) = @_; if (defined $user) { @@ -95,18 +95,18 @@ and rcs_getctime and rcs_notify aren't written at all. --[[bma]] system("bzr","whoami",$olduser); return undef; # success - } #}}} + } - sub rcs_add ($) { # {{{ + sub rcs_add ($) { my ($file) = @_; my @cmdline = ("bzr", "add", "--quiet", "$config{srcdir}/$file"); if (system(@cmdline) != 0) { warn "'@cmdline' failed: $!"; } - } #}}} + } - sub rcs_recentchanges ($) { #{{{ + sub rcs_recentchanges ($) { my ($num) = @_; eval q{use CGI 'escapeHTML'}; @@ -153,15 +153,15 @@ and rcs_getctime and rcs_notify aren't written at all. 
--[[bma]] } return @ret; - } #}}} + } - sub rcs_notify () { #{{{ + sub rcs_notify () { # TODO - } #}}} + } - sub rcs_getctime ($) { #{{{ + sub rcs_getctime ($) { # TODO - } #}}} + } 1 diff --git a/doc/todo/cas_authentication.mdwn b/doc/todo/cas_authentication.mdwn index c8ffe7005..8bf7042df 100644 --- a/doc/todo/cas_authentication.mdwn +++ b/doc/todo/cas_authentication.mdwn @@ -43,11 +43,11 @@ follows) ? > the use of it: `eval q{use AuthCAS}; error $@ if $@` + - +sub import { #{{{ + +sub import { + hook(type => "getopt", id => "cas", call => \&getopt); + hook(type => "auth", id => "cas", call => \&auth); + hook(type => "formbuilder_setup", id => "cas", call => \&formbuilder_setup); - +} # }}} + +} > Could you please use tabs for indentation of program flow? @@ -61,15 +61,15 @@ follows) ? > Why would you want to make other auth plugins not work? Could a site not > legitimatly chose to use this and another auth method? - +sub getopt () { #{{{ + +sub getopt () { + eval q{use Getopt::Long}; + error($@) if $@; + Getopt::Long::Configure('pass_through'); + GetOptions("cas_url=s" => \$config{cas_url}); + GetOptions("ca_file=s" => \$config{ca_file}); - +} #}}} + +} + - +sub auth ($$) { #{{{ + +sub auth ($$) { + my $q=shift; + my $session=shift; + @@ -98,11 +98,11 @@ follows) ? + error("CAS failure: ".&AuthCAS::get_errors()); + } + } - +} #}}} + +} + +# I use formbuilder_setup and not formbuilder type in order to bypass the +# Logout processing done in IkiWiki::CGI::cgi_prefs() - +sub formbuilder_setup (@) { #{{{ + +sub formbuilder_setup (@) { + my %params=@_; + + my $form=$params{form}; diff --git a/doc/todo/clear_page_to_delete.mdwn b/doc/todo/clear_page_to_delete.mdwn index 50ae246bb..6bab6ef27 100644 --- a/doc/todo/clear_page_to_delete.mdwn +++ b/doc/todo/clear_page_to_delete.mdwn @@ -4,7 +4,7 @@ cleared to be entirely empty (or only have whitespace)? Discuss. --[[Joey]] I'd say so; yes. A method of deleting pages via the web would be great; I can't think of a use of keeping blank pages around. What about vandalism -- if someone blanks a page and deletes it and someone else wishes to restore -it; or is undoing edits via the web a bigger issue? -- [[JonDowland]] +it; or is undoing edits via the web a bigger issue? -- [[users/Jon]] Of course there's already a way to delete pages (remove plugin). So the question is really: @@ -30,4 +30,4 @@ keyword and if there were no page editions since XX days. Here, I use pages that can be empty everyday and filled all day long. It does not make sense to me to delete these pages :). --[[xma]] -I was not aware of [[plugins/remove]]. I don't think another method is necessary -- [[JonDowland]] +I was not aware of [[plugins/remove]]. 
I don't think another method is necessary -- [[users/Jon]] diff --git a/doc/todo/color_plugin.mdwn b/doc/todo/color_plugin.mdwn index 69afe837d..19fba3b35 100644 --- a/doc/todo/color_plugin.mdwn +++ b/doc/todo/color_plugin.mdwn @@ -132,12 +132,12 @@ Of course, I'm open for discussion or exchange of ideas :) --[[Paweł|ptecza]] +use strict; +use IkiWiki 2.00; + - +sub import { #{{{ + +sub import { + hook(type => "preprocess", id => "color", call => \&preprocess); + hook(type => "format", id => "color", call => \&format); - +} #}}} + +} + - +sub preserve_style ($$$) { #{{{ + +sub preserve_style ($$$) { + my $foreground = shift; + my $background = shift; + my $text = shift; @@ -162,18 +162,18 @@ Of course, I'm open for discussion or exchange of ideas :) --[[Paweł|ptecza]] + + return $preserved; + - +} #}}} + +} + - +sub replace_preserved_style ($) { #{{{ + +sub replace_preserved_style ($) { + my $content = shift; + + $content =~ s!<span class="color">((color: ([a-z]+|\#[0-9a-f]{3,6})?)?((; )?(background-color: ([a-z]+|\#[0-9a-f]{3,6})?)?)?)</span>!<span class="color" style="$1">!g; + $content =~ s!<span class="colorend">!!g; + + return $content; - +} #}}} + +} + - +sub preprocess (@) { #{{{ + +sub preprocess (@) { + my %params = @_; + + # Preprocess the text to expand any preprocessor directives @@ -182,14 +182,14 @@ Of course, I'm open for discussion or exchange of ideas :) --[[Paweł|ptecza]] + IkiWiki::filter($params{page}, $params{destpage}, $params{text})); + + return preserve_style($params{foreground}, $params{background}, $params{text}); - +} #}}} + +} + - +sub format (@) { #{{{ + +sub format (@) { + my %params = @_; + + $params{content} = replace_preserved_style($params{content}); + return $params{content}; - +} #}}} + +} + +1 --- /dev/null 2008-06-21 02:02:15.000000000 +0200 diff --git a/doc/todo/comments.mdwn b/doc/todo/comments.mdwn new file mode 100644 index 000000000..832441be1 --- /dev/null +++ b/doc/todo/comments.mdwn @@ -0,0 +1,149 @@ +# Known issues with the [[plugins/comments]] plugin + +## Unimplemented + +* Instead of just a link to add a comment, it could have a form to enter + the title, similar to the form for adding a new blog post. + + > I'm not sure this is so useful? On Livejournal titles are allowed on + > comments, but very rarely used (and indeed usually not very useful); + > it's hard enough to get some people to title their blog posts :-) + > --[[smcv]] + +* If a spammer posts a comment, it is either impossible or hard to clean + up via the web. Would be nice to have some kind of link on the comment + that allows trusted users to remove it (using the remove plugin of + course). + + > Won't the remove plugin refuse to remove internal pages? This would be + > a good feature to have, though. --[[smcv]] + + > Here, FWIW, is the first ikiwiki comment spam I've seen: + > <http://waldeneffect.org/blog/Snake_bite_information/#blog/Snake_bite_information/comment_1> + > So that took about 10 days... + > --[[Joey]] + +## Patches pending merge + +* There is some common code cargo-culted from other plugins (notably inline and editpage) which + should probably be shared + + > Actually, there's less of this now than there used to be - a lot of simple + > things that were shared have become unshareable as they became more + > complex. --[[smcv]] + + > There's still goto. You have a branch for that. --[[Joey]] + +## Won't fix + +* It would be useful to have a pagespec that always matches all comments on + pages matching a glob. Something like `comment(blog/*)`. 
+ Perhaps postcomment could also be folded into this? Then the pagespec + would match both existing comments, as well as new comments that are + being posted. + + > Please see [[plugins/comments/discussion]]. If I've convinced you that + > internal pages are the way forward, then sure, we can do that, because + > people who can comment still won't be able to edit others' comments + > (one of my goals is that commenters can't put words into each other's + > mouths :-) ) + > + > On the other hand, if you still want me to switch this plugin to "real" + > pages, or if internal pages might become editable in future, then + > configuring lockedit/anonok so a user X can add comments to blog pages + > would also let X edit/delete comments on blog pages (including those + > written by others) in arbitrary ways, which doesn't seem good. --[[smcv]] + + > I had a look at implementing comment() and fell afoul of + > some optimisations that assume only internal() will be used to match + > internal pages. So probably this isn't worth doing. --[[Joey]] + +## Done + +* The default template should have a (?) icon next to unauthenticated users (with the IP address + as title) and an OpenID icon next to OpenIDs + + > Done in my comments git branch, at least as a mockup (using the (?), + > {x} and {*} smileys for anonymous, OpenID and login respectively). + > --[[smcv]] + + >> I've improved this to use independent icons from the wikiicons + >> directory (untested!) --[[smcv]] + + >>> The new code produces links like /wikiisons/openid.png, which + >>> fail if ikiwiki is not at the root of the web server. --[[Joey]] + + >>>> Sorry, I should have spotted that (the assumption failed on my demo + >>>> site, but the push to that site was when I was on the way out, so I + >>>> didn't have time to investigate). As a note for other ikiwiki hackers, + >>>> I should have used + >>>> `<img src="<TMPL_VAR NAME=BASEURL>wikiicons/openid.png" />`. --[[smcv]] + + >>> I got to wondering if the icons are needed. On my comments branch + >>> (not master), I've dropped the icons and info can be seen by hovering + >>> over the author's name. Idea being that you probably don't care how + >>> they authenticated unless something is weird, and in that case you + >>> can hover to check. Does that make sense, should I merge it? + >>> --[[Joey]] + + >>>> Yeah, go ahead. I preferred my layout with the author before the + >>>> comment - perhaps that's Livejournal's influence :-) - but I can always + >>>> edit the templates for my own site. As long as the default is something + >>>> reasonable and both layouts are possible, I don't really mind. + >>>> Minimizing the number of "resource" files in the basewiki also seems + >>>> a good goal. --[[smcv]] + +* Previews always say "unknown IP address" + + > Fixed in my comments branch by commits bc66a00b and 95b3bbbf --[[smcv]] + +* The Comments link in the "toolbar" is to `index.html#comments`, not the + desired `./#comments` + + > Fixed in my comments branch by commit 0844bd0b; commits 5b1cf21a + > and c42f174e fix another `beautify_urlpath` bug and add a regression test + > --[[smcv]] + +* Now that inline has some comments-specific functionality anyway, it would + be good to output `<link rel="comments">` in Atom and the equivalent in RSS. 
+ + > Fixed in my comments branch by d0d598e4, 3feebe31, 9e5f504e --[[smcv]] + + +* Add `COMMENTOPENID`: the authenticated/verified user name, if and only if it was an OpenID + + > Done in my comments git branch --[[smcv]] + + > Not seeing it there, which branch? --[[Joey]] + + >> Bah, git push --all is not the default... 'comments' branch now (I've also rebased it). + >> Sorry, I'm on mobile Internet at the moment... --[[smcv]] + + >>> merged by [[Joey]] in commit 0f03af38 --[[smcv]] + +* Should the comments be visually set off more from the page above? + Rather than just a horizontal rule, I'm thinking put the comments + in a box like is used for inlined pages. + + > I did put them in a box in the CSS... I agree the default template + > could do with visual improvement though. --[[smcv]] + + >> I'll consider this solved by [[Joey]]'s changes. --[[smcv]] + +* One can use inline to set up a feed of all comments posted to any page. + Using template=comment they are displayed right. Only problem + is there is no indication in that template of what page each comment in the + feed is a comment on. So, if a comment is inlined into a different page, + I think it should show a link back to the page commented on. + (BTW, the rss feed in this situation seems ok; there the link element + points back to the parent page. + + > done --[[Joey]] + +* One of Joey's commit messages says "Not ideal, it would be nicer to jump to + the actual comment posted, but no anchor is available". In fact there is + an anchor - the `\[[_comment]]` preprocessing wraps the comment in a `<div>` + with id="comment_123" or something. I'll fix this, unless Joey gets there + first. --[[smcv]] + + > done --[[Joey]] diff --git a/doc/todo/darcs.mdwn b/doc/todo/darcs.mdwn index e5bf5ee27..882a41379 100644 --- a/doc/todo/darcs.mdwn +++ b/doc/todo/darcs.mdwn @@ -219,14 +219,14 @@ This is my ([bma](bma@bmalee.eu)) darcs.pm - it's messy (my Perl isn't up to muc package IkiWiki; - sub rcs_update () { #{{{ + sub rcs_update () { # Do nothing - there's nowhere to update *from*. - } #}}} + } - sub rcs_prepedit ($) { #{{{ - } #}}} + sub rcs_prepedit ($) { + } - sub rcs_commit ($$$;$$) { #{{{ + sub rcs_commit ($$$;$$) { my ($file, $message, $rcstoken, $user, $ipaddr) = @_; # $user should probably be a name and an email address, by darcs @@ -257,16 +257,16 @@ This is my ([bma](bma@bmalee.eu)) darcs.pm - it's messy (my Perl isn't up to muc return undef; # success - sub rcs_add ($) { # {{{ + sub rcs_add ($) { my ($file) = @_; my @cmdline = ("darcs", "add", "--repodir", "$config{srcdir}", "-a", "-q", "$file"); if (system(@cmdline) != 0) { warn "'@cmdline' failed: $!"; } - } #}}} + } - sub rcs_recentchanges ($) { #{{{ + sub rcs_recentchanges ($) { # TODO: This is horrible code. It doesn't work perfectly, and uses regexes # rather than parsing Darcs' XML output. 
my $num=shift; @@ -314,15 +314,15 @@ This is my ([bma](bma@bmalee.eu)) darcs.pm - it's messy (my Perl isn't up to muc } } return @ret; - } #}}} + } - sub rcs_notify () { #{{{ + sub rcs_notify () { # TODO - } #}}} + } - sub rcs_getctime ($) { #{{{ + sub rcs_getctime ($) { error gettext("getctime not implemented"); - } #}}} + } 1 diff --git a/doc/todo/datearchives-plugin.mdwn b/doc/todo/datearchives-plugin.mdwn index 5a5560d6c..5f33cde4c 100644 --- a/doc/todo/datearchives-plugin.mdwn +++ b/doc/todo/datearchives-plugin.mdwn @@ -17,11 +17,11 @@ Index: IkiWiki/Plugin/datearchives.pm +use strict; +use IkiWiki; + -+sub import { #{{{ ++sub import { + hook(type => "pagetemplate", id => "datearchives", call => \&pagetemplate, scan => 1); -+} # }}} ++} + -+sub pagetemplate (@) { #{{{ ++sub pagetemplate (@) { + my %args = @_; + my $dt; + eval { @@ -37,7 +37,7 @@ Index: IkiWiki/Plugin/datearchives.pm + $template->param(ctime => htmllink( $args{page}, $args{destpage}, $link, 0, 0, + $template->param('ctime'))); + } -+} # }}} ++} + +1 </pre> diff --git a/doc/todo/different_search_engine.mdwn b/doc/todo/different_search_engine.mdwn index 2f309dea5..9d0fc92c9 100644 --- a/doc/todo/different_search_engine.mdwn +++ b/doc/todo/different_search_engine.mdwn @@ -126,7 +126,7 @@ Index: IkiWiki/Plugin/search.pm + $PLUCENE_DIR = $config{wikistatedir}.'/plucene'; +} + - sub import { #{{{ + sub import { - hook(type => "getopt", id => "hyperestraier", - call => \&getopt); - hook(type => "checkconfig", id => "hyperestraier", @@ -142,14 +142,14 @@ Index: IkiWiki/Plugin/search.pm call => \&change); - hook(type => "cgi", id => "hyperestraier", - call => \&cgi); - } # }}} + } --sub getopt () { #{{{ +-sub getopt () { - eval q{use Getopt::Long}; - error($@) if $@; - Getopt::Long::Configure('pass_through'); - GetOptions("estseek=s" => \$config{estseek}); --} #}}} +-} +sub writer { + init(); @@ -165,20 +165,20 @@ Index: IkiWiki/Plugin/search.pm + grep { defined pagetype($_) } @_; +} + - sub checkconfig () { #{{{ + sub checkconfig () { foreach my $required (qw(url cgiurl)) { if (! length $config{$required}) { @@ -36,112 +58,55 @@ } - } #}}} + } -my $form; --sub pagetemplate (@) { #{{{ +-sub pagetemplate (@) { - my %params=@_; - my $page=$params{page}; - my $template=$params{template}; +#my $form; -+#sub pagetemplate (@) { #{{{ ++#sub pagetemplate (@) { +# my %params=@_; +# my $page=$params{page}; +# my $template=$params{template}; @@ -193,7 +193,7 @@ Index: IkiWiki/Plugin/search.pm +# +# $template->param(searchform => $form); +# } -+#} #}}} ++#} - # Add search box to page header. 
- if ($template->query(name => "searchform")) { @@ -205,9 +205,9 @@ Index: IkiWiki/Plugin/search.pm - - $template->param(searchform => $form); - } --} #}}} +-} - - sub delete (@) { #{{{ + sub delete (@) { - debug(gettext("cleaning hyperestraier search index")); - estcmd("purge -cl"); - estcfg(); @@ -219,9 +219,9 @@ Index: IkiWiki/Plugin/search.pm + $reader->delete_term( Plucene::Index::Term->new({ field => "id", text => $_ })); + } + $reader->close; - } #}}} + } - sub change (@) { #{{{ + sub change (@) { - debug(gettext("updating hyperestraier search index")); - estcmd("gather -cm -bc -cl -sd", - map { @@ -250,9 +250,9 @@ Index: IkiWiki/Plugin/search.pm + $doc->add(Plucene::Document::Field->UnStored('text' => $data)); + $writer->add_document($doc); + } - } #}}} + } - --sub cgi ($) { #{{{ +-sub cgi ($) { - my $cgi=shift; - - if (defined $cgi->param('phrase') || defined $cgi->param("navi")) { @@ -260,10 +260,10 @@ Index: IkiWiki/Plugin/search.pm - chdir("$config{wikistatedir}/hyperestraier") || error("chdir: $!"); - exec("./".IkiWiki::basename($config{cgiurl})) || error("estseek.cgi failed"); - } --} #}}} +-} - -my $configured=0; --sub estcfg () { #{{{ +-sub estcfg () { - return if $configured; - $configured=1; - @@ -301,9 +301,9 @@ Index: IkiWiki/Plugin/search.pm - unlink($cgi); - my $estseek = defined $config{estseek} ? $config{estseek} : '/usr/lib/estraier/estseek.cgi'; - symlink($estseek, $cgi) || error("symlink $estseek $cgi: $!"); --} # }}} +-} - --sub estcmd ($;@) { #{{{ +-sub estcmd ($;@) { - my @params=split(' ', shift); - push @params, "-cl", "$config{wikistatedir}/hyperestraier"; - if (@_) { @@ -323,7 +323,7 @@ Index: IkiWiki/Plugin/search.pm - open(STDOUT, "/dev/null"); # shut it up (closing won't work) - exec("estcmd", @params) || error("can't run estcmd"); - } --} #}}} +-} - -1 +1; diff --git a/doc/todo/directive_docs.mdwn b/doc/todo/directive_docs.mdwn index 1f6307381..2baa61b40 100644 --- a/doc/todo/directive_docs.mdwn +++ b/doc/todo/directive_docs.mdwn @@ -40,15 +40,15 @@ defined them: --[[Joey]] index e476521..afe982a 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -493,6 +493,7 @@ sub loadplugins () { #{{{ + @@ -493,6 +493,7 @@ sub loadplugins () { return 1; - } #}}} + } +my $loading_plugin; - sub loadplugin ($) { #{{{ + sub loadplugin ($) { my $plugin=shift; - @@ -502,14 +503,18 @@ sub loadplugin ($) { #{{{ + @@ -502,14 +503,18 @@ sub loadplugin ($) { "$installdir/lib/ikiwiki") { if (defined $dir && -x "$dir/plugins/$plugin") { require IkiWiki::Plugin::external; @@ -67,7 +67,7 @@ defined them: --[[Joey]] if ($@) { error("Failed to load plugin $mod: $@"); } - @@ -1429,6 +1434,9 @@ sub hook (@) { # {{{ + @@ -1429,6 +1434,9 @@ sub hook (@) { return if $param{no_override} && exists $hooks{$param{type}}{$param{id}}; @@ -76,4 +76,4 @@ defined them: --[[Joey]] + $hooks{$param{type}}{$param{id}}=\%param; return 1; - } # }}} + } diff --git a/doc/todo/enable-htaccess-files.mdwn b/doc/todo/enable-htaccess-files.mdwn index b3c174fba..e3b295123 100644 --- a/doc/todo/enable-htaccess-files.mdwn +++ b/doc/todo/enable-htaccess-files.mdwn @@ -5,7 +5,7 @@ @@ -26,7 +26,7 @@ memoize("file_pruned"); - sub defaultconfig () { #{{{ + sub defaultconfig () { - wiki_file_prune_regexps => [qr/\.\./, qr/^\./, qr/\/\./, + wiki_file_prune_regexps => [qr/\.\./, qr/^\.(?!htaccess)/, qr/\/\.(?!htaccess)/, qr/\.x?html?$/, qr/\.ikiwiki-new$/, diff --git a/doc/todo/format_escape.mdwn b/doc/todo/format_escape.mdwn index 8dfe05581..574883d1b 100644 --- a/doc/todo/format_escape.mdwn +++ 
b/doc/todo/format_escape.mdwn @@ -141,13 +141,13 @@ Index: IkiWiki/Plugin/rst.pm print html[html.find('<body>')+6:html.find('</body>')].strip(); "; - sub import { #{{{ + sub import { hook(type => "htmlize", id => "rst", call => \&htmlize); + hook(type => "htmlescape", id => "rst", call => \&htmlescape); + hook(type => "htmlescapelink", id => "rst", call => \&htmlescapelink); - } # }}} + } -+sub htmlescapelink ($$;@) { #{{{ ++sub htmlescapelink ($$;@) { + my $url = shift; + my $text = shift; + my %params = @_; @@ -158,15 +158,15 @@ Index: IkiWiki/Plugin/rst.pm + else { + return "`$text <$url>`_"; + } -+} # }}} ++} + -+sub htmlescape ($) { #{{{ ++sub htmlescape ($) { + my $html=shift; + $html=~s/^/ /mg; + return ".. raw:: html\n\n".$html; -+} # }}} ++} + - sub htmlize (@) { #{{{ + sub htmlize (@) { my %params=@_; my $content=$params{content}; Index: doc/plugins/write.mdwn @@ -272,7 +272,7 @@ Index: IkiWiki.pm + return $hooks{htmlescapelink}{$type}{call}->($bestlink, $linktext); + } return "<a href=\"$bestlink\">$linktext</a>"; - } #}}} + } @@ -628,6 +640,14 @@ preview => $preprocess_preview, diff --git a/doc/todo/fortune:_select_options_via_environment.mdwn b/doc/todo/fortune:_select_options_via_environment.mdwn index f906312fe..ddacd91b5 100644 --- a/doc/todo/fortune:_select_options_via_environment.mdwn +++ b/doc/todo/fortune:_select_options_via_environment.mdwn @@ -14,9 +14,9 @@ package IkiWiki::Plugin::fortune; use warnings; - @@ -12,7 +18,13 @@ sub import { #{{{ + @@ -12,7 +18,13 @@ sub import { - sub preprocess (@) { #{{{ + sub preprocess (@) { $ENV{PATH}="$ENV{PATH}:/usr/games:/usr/local/games"; - my $f = `fortune 2>/dev/null`; + my $f; diff --git a/doc/todo/generic___39__do__61__goto__39___for_CGI.mdwn b/doc/todo/generic___39__do__61__goto__39___for_CGI.mdwn new file mode 100644 index 000000000..1828f0a7b --- /dev/null +++ b/doc/todo/generic___39__do__61__goto__39___for_CGI.mdwn @@ -0,0 +1,9 @@ +The [[plugins/recentchanges]] plugin has a `do=recentchanges_link` feature that will +redirect to a given wiki page, or an error page with a creation link. + +In the [[plugins/contrib/comments]] plugin I've found that it would be useful to do +the same for users. For now I've just cloned the functionality into the comments +plugin, but perhaps this functionality could be renamed to `do=goto` or +something, and moved to `IkiWiki/CGI.pm`? + +If there's general approval I'm happy to write a patch. --[[smcv]] diff --git a/doc/todo/httpauth_feature_parity_with_passwordauth.mdwn b/doc/todo/httpauth_feature_parity_with_passwordauth.mdwn index 8a338ece1..eb71cf840 100644 --- a/doc/todo/httpauth_feature_parity_with_passwordauth.mdwn +++ b/doc/todo/httpauth_feature_parity_with_passwordauth.mdwn @@ -1,5 +1,8 @@ -The only way to have a private ikiwiki, with a shared user database for static pages and CGI authentication, is to use [[plugins/httpauth]]. It would be good for httpauth to be on par with [[plugins/passwordauth]], -i.e. to allow registering users, resetting passwords, and changing passwords; supporting some kind of +The only way to have a private ikiwiki, with a shared user database +for static pages and CGI authentication, is to use +[[plugins/httpauth]]. It would be good for httpauth to be on par with +[[plugins/passwordauth]], i.e. to allow registering users, resetting +passwords, and changing passwords; supporting some kind of `account_creation_password` configuration option would be nice, too. I'll probably propose patches implementing this at some point. 
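For reference, this is roughly what that knob looks like for [[plugins/passwordauth]] in an `ikiwiki.setup` file today (the value below is only a placeholder); the request here is for [[plugins/httpauth]] to grow an equivalent:

    # in ikiwiki.setup -- placeholder value, pick your own
    account_creation_password => "example-password",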
@@ -8,4 +11,18 @@ the relevant passwordauth code, instead of rewriting it completely in httpauth. -- [[intrigeri]] +Well, on such a private wiki, one can neither register herself nor +reset his password: the registration page, as any other page, would be +forbidden to non-authenticated users. Admin users should then be +enabled to: + +- register a new user +- reset someone else's password + +In both cases, a brand new random password is sent by e-mail to the +new user. + +An authenticated user should nevertheless be able to change his +own password. -- [[intrigeri]] + [[wishlist]] diff --git a/doc/todo/index.html_allowed.mdwn b/doc/todo/index.html_allowed.mdwn index f030f9eea..f5e6f8cd7 100644 --- a/doc/todo/index.html_allowed.mdwn +++ b/doc/todo/index.html_allowed.mdwn @@ -91,15 +91,15 @@ page "A/B/index.html" is treated as "A/B". +++ ikidev/IkiWiki.pm 2007-02-25 15:05:22.328852000 -0800 @@ -192,6 +192,12 @@ return $untainted; - } #}}} + } - +sub titlename($;@) { #{{{ + +sub titlename($;@) { + my $page = shift; + $page =~ s!/index$!!; + return pagetitle(basename($page), @_); - +} #}}} + +} + - sub basename ($) { #{{{ + sub basename ($) { my $file=shift; @@ -117,7 +117,7 @@ diff -ru ikiwiki-2.4/IkiWiki.pm ikiwiki/IkiWiki.pm $page=~s/\Q.$type\E*$// if defined $type; + $page=~s/\/index$// if $page =~ /\/index$/; return $page; - } #}}} + } </pre> diff --git a/doc/todo/inline:_numerical_ordering_by_title.mdwn b/doc/todo/inline:_numerical_ordering_by_title.mdwn index 95511d998..3f6c8b598 100644 --- a/doc/todo/inline:_numerical_ordering_by_title.mdwn +++ b/doc/todo/inline:_numerical_ordering_by_title.mdwn @@ -155,11 +155,11 @@ Joey, have you forgotten about that request? ;) --[[Paweł|ptecza]] %config %links %pagestate %renderedfiles %pagesources %destsources); our $VERSION = 2.00; # plugin interface version, next is ikiwiki version - @@ -835,6 +835,42 @@ sub titlepage ($) { #{{{ + @@ -835,6 +835,42 @@ sub titlepage ($) { return $title; - } #}}} + } - +sub titlecmp ($$) { #{{{ + +sub titlecmp ($$) { + my $titleA=shift; + my $titleB=shift; + @@ -193,29 +193,29 @@ Joey, have you forgotten about that request? ;) --[[Paweł|ptecza]] + return -1 if (@listB); + + return 0; - +} #}}} + +} + - sub linkpage ($) { #{{{ + sub linkpage ($) { my $link=shift; my $chars = defined $config{wiki_file_chars} ? $config{wiki_file_chars} : "-[:alnum:]+/.:_"; diff --git a/IkiWiki/Plugin/brokenlinks.pm b/IkiWiki/Plugin/brokenlinks.pm index 37752dd..ccaa399 100644 --- a/IkiWiki/Plugin/brokenlinks.pm +++ b/IkiWiki/Plugin/brokenlinks.pm - @@ -59,7 +59,7 @@ sub preprocess (@) { #{{{ + @@ -59,7 +59,7 @@ sub preprocess (@) { map { "<li>$_</li>" } - sort @broken) + sort titlecmp @broken) ."</ul>\n"; - } # }}} + } diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm index 8efef3f..263e7a6 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm - @@ -192,7 +192,7 @@ sub preprocess_inline (@) { #{{{ + @@ -192,7 +192,7 @@ sub preprocess_inline (@) { } if (exists $params{sort} && $params{sort} eq 'title') { @@ -228,20 +228,20 @@ Joey, have you forgotten about that request? ;) --[[Paweł|ptecza]] index b910758..10a1d87 100644 --- a/IkiWiki/Plugin/orphans.pm +++ b/IkiWiki/Plugin/orphans.pm - @@ -56,7 +56,7 @@ sub preprocess (@) { #{{{ + @@ -56,7 +56,7 @@ sub preprocess (@) { htmllink($params{page}, $params{destpage}, $_, noimageinline => 1). "</li>" - } sort @orphans). + } sort titlecmp @orphans). 
"</ul>\n"; - } # }}} + } diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm index ceb7c84..00798e1 100644 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm - @@ -89,7 +89,7 @@ sub genpage ($$) { #{{{ + @@ -89,7 +89,7 @@ sub genpage ($$) { $template->param(have_actions => 1); } diff --git a/doc/todo/inline_plugin:_ability_to_override_feed_name.mdwn b/doc/todo/inline_plugin:_ability_to_override_feed_name.mdwn new file mode 100644 index 000000000..df5bf9194 --- /dev/null +++ b/doc/todo/inline_plugin:_ability_to_override_feed_name.mdwn @@ -0,0 +1,29 @@ +If RSS and Atom are enabled by default, the [[plugins/contrib/comments]] +plugin generates a feed, perhaps `/sandbox/index.atom` for comments on the +sandbox. If a blog is added to the page, the blog will steal the name +`/sandbox/index.atom` and the comments plugin's feed will change to +`/sandbox/index.atom2`. + +If `\[[!inline]]` gained a parameter `feedname` or something, the comments +plugin could use `feedname=comments` to produce `/sandbox/comments.atom` +instead (this would just require minor enhancements to rsspage(), +atompage() and targetpage()). + +As a side benefit, [my blog](http://smcv.pseudorandom.co.uk/) could go back +to its historical Atom feed URL of `.../feed.atom` (which is currently a +symlink to `index.atom` :-) ) + +On sites not using `usedirs` the current feed is `/sandbox.atom`, and we +could perhaps change it to `/sandbox-comments.atom` or +`/sandbox/comments.atom` if `feedname=comments` is given. + +--[[smcv]] + +> This is slightly hard to do, because you have to worry about +> conflicting pages setting feedname, which could cause ikiwiki to blow up. +> +> Particularly for the non-usedirs case, where a page `sandbox/comments` +> would produce the same feed as sandbox with `feedname=comments`. +> --[[Joey]] + +> [[done]] as feedfile option --[[Joey]] diff --git a/doc/todo/inline_plugin:_hide_feed_buttons_if_empty.mdwn b/doc/todo/inline_plugin:_hide_feed_buttons_if_empty.mdwn new file mode 100644 index 000000000..d046c0cd0 --- /dev/null +++ b/doc/todo/inline_plugin:_hide_feed_buttons_if_empty.mdwn @@ -0,0 +1,7 @@ + < joeyh> 03:49:19> also, I think it may be less visually confusing to + drop the rss/atom buttons for comments when there are none yet + +This seems to me like something that applies to the [[plugins/inline]] plugin in general, rather than the [[plugins/contrib/comments]] plugin specifically. --[[smcv]] + +>> [[done]] as emptyfeeds option, not on by default for inline, but I think +>> it should be for comments --[[Joey]] diff --git a/doc/todo/language_definition_for_the_meta_plugin.mdwn b/doc/todo/language_definition_for_the_meta_plugin.mdwn index 33098c601..4ac4e2e25 100644 --- a/doc/todo/language_definition_for_the_meta_plugin.mdwn +++ b/doc/todo/language_definition_for_the_meta_plugin.mdwn @@ -54,7 +54,7 @@ This may be useful for sites with a few pages in different languages, but no ful my %authorurl; +my %lang; - sub import { #{{{ + sub import { hook(type => "preprocess", id => "meta", call => \&preprocess, scan => 1); @@ -100,6 +101,11 @@ $meta{$page}.='<link href="'.encode_entities($value). 
@@ -75,7 +75,7 @@ This may be useful for sites with a few pages in different languages, but no ful + $template->param(lang => $lang{$page}) + if exists $lang{$page} && $template->query(name => "lang"); - } # }}} + } </pre> > Please resolve lang somewhere reusable rather than within meta plugin: It is certainly usable outside diff --git a/doc/todo/location_of_external_plugins.mdwn b/doc/todo/location_of_external_plugins.mdwn new file mode 100644 index 000000000..c28003e74 --- /dev/null +++ b/doc/todo/location_of_external_plugins.mdwn @@ -0,0 +1,24 @@ +Would it be possible to make the installation location for the external +plugins (those talked to via xmlrpc) configurable? Currently, they are +installed into (and later expected to be in) /usr/lib/ikiwiki/plugins. For +the Fedora package (which I maintain), I move them to +/usr/libexec/ikiwiki/plugins. While not covered by the FHS, this seems to +be a more appropriate place, see: +https://fedoraproject.org/wiki/Packaging/Guidelines#Libexecdir. + +> This would need to be a build time configuration setting so the directory +> is built into ikiwiki for use at runtime. --[[Joey]] + +As a side note, the accompanying proxy.py might better be placed into some directory on the python path. + +> If someone can show how to do so without needing a Setup.py and all the +> pain that using one entails.. --[[Joey]] + +>> At the very least I don't think proxy.py should be on the `sys.path` +>> under its current name. If it was renamed to ikiwiki_proxy or some such, +>> possibly; but I think it's more appropriate to have it in an +>> ikiwiki-specific directory (a "private module") since it's not useful for +>> anything outside ikiwiki, and putting it in the same directory as the +>> external plugins means it's automatically in their `sys.path` without +>> needing special configuration. --[[smcv]] +>> (a mostly-inactive member of Debian's Python modules packaging team) diff --git a/doc/todo/location_of_ikiwiki-w3m.cgi.mdwn b/doc/todo/location_of_ikiwiki-w3m.cgi.mdwn new file mode 100644 index 000000000..8ca925bee --- /dev/null +++ b/doc/todo/location_of_ikiwiki-w3m.cgi.mdwn @@ -0,0 +1,3 @@ +The `ikiwiki-w3m.cgi` script is installed (hard-coded) into `/usr/lib/w3m/cgi-bin`. On Fedora however, the w3m package expects it in `/usr/libexec/w3m/cgi-bin`. So, it would be nice if the destination for this script could be configured. + +> You can use `W3M_CGI_BIN now`. [[done]] --[[Joey]] diff --git a/doc/todo/mbox.mdwn b/doc/todo/mbox.mdwn index 081d51200..f7744563c 100644 --- a/doc/todo/mbox.mdwn +++ b/doc/todo/mbox.mdwn @@ -14,3 +14,6 @@ I'd like to be able to drop an unmodified RFC2822 email message into ikiwiki, an >>> to page foo). I'm not sure if this is possible and worthwhile to fix. >> It is certainly workable >>> to use a \[[!mailbox ]] directive. -- [[DavidBremner]] + +> Your gitweb doesn't tell me where I can git pull this from, which I'd +> like to do ... --[[Joey]] diff --git a/doc/todo/meta_rcsid.mdwn b/doc/todo/meta_rcsid.mdwn index 81a2c1328..158edea6e 100644 --- a/doc/todo/meta_rcsid.mdwn +++ b/doc/todo/meta_rcsid.mdwn @@ -26,7 +26,7 @@ of CVS/SVN-style keywords (like '$Id$', etc.) 
from the source file in the page t my %copyright; +my %rcsid; - sub import { #{{{ + sub import { hook(type => "preprocess", id => "meta", call => \&preprocess, scan => 1); @@ -110,6 +111,9 @@ $meta{$page}.="<link rel=\"copyright\" href=\"#page_copyright\" />\n"; diff --git a/doc/todo/missingparents.pm.mdwn b/doc/todo/missingparents.pm.mdwn index 0cc7137ba..c5f2ab535 100644 --- a/doc/todo/missingparents.pm.mdwn +++ b/doc/todo/missingparents.pm.mdwn @@ -82,15 +82,15 @@ Index: IkiWiki/Plugin/missingparents.pm +my %ownfiles; +my @pagespecs; + -+sub import { #{{{ ++sub import { + hook(type => "checkconfig", id => "missingparents", call => \&checkconfig); + hook(type => "needsdelete", id => "missingparents", call => \&needsdelete); + hook(type => "needsbuild", id => "missingparents", call => \&needsbuild); + hook(type => "savestate", id => "missingparents", call => \&savestate); + hook(type => "preprocess", id => "missingparents", call => \&preprocess_missingparents); -+} # }}} ++} + -+sub checkconfig () { #{{{ ++sub checkconfig () { + IkiWiki::preprocess("missingparents", "missingparents", + readfile(srcfile("missingparents.mdwn"))); + loadstate(); @@ -99,9 +99,9 @@ Index: IkiWiki/Plugin/missingparents.pm + unlink $config{srcdir}.'/'.$file; + } + } -+} #}}} ++} + -+sub preprocess_missingparents (@) { #{{{ ++sub preprocess_missingparents (@) { + my %params=@_; + + if (! defined $params{pages} || ! defined $params{generate}) { @@ -115,10 +115,10 @@ Index: IkiWiki/Plugin/missingparents.pm + #translators: is text for pages that match that pagespec. + return sprintf(gettext("missingparents in %s will be %s"), + '`'.$params{pages}.'`', '`\\'.$params{generate}.'`'); -+} # }}} ++} + +my $state_loaded=0; -+sub loadstate() { #{{{ ++sub loadstate() { + my $filename = "$config{wikistatedir}/missingparents"; + if (-e $filename) { + open (IN, $filename) || @@ -132,9 +132,9 @@ Index: IkiWiki/Plugin/missingparents.pm + + $state_loaded=1; + } -+} #}}} ++} + -+sub savestate() { #{{{ ++sub savestate() { + my $filename = "$config{wikistatedir}/missingparents.new"; + my $cleanup = sub { unlink ($filename) }; + open (OUT, ">$filename") || error("open $filename: $!", $cleanup); @@ -143,9 +143,9 @@ Index: IkiWiki/Plugin/missingparents.pm + } + rename($filename, "$config{wikistatedir}/missingparents") || + error("rename $filename: $!", $cleanup); -+} #}}} ++} + -+sub needsdelete (@) { #{{{ ++sub needsdelete (@) { + my $files=shift; + + my @mydel; @@ -167,9 +167,9 @@ Index: IkiWiki/Plugin/missingparents.pm + foreach my $page (@mydel){ + push @{$files}, $page; + } -+} #}}} ++} + -+sub check_matches($) { #{{{ ++sub check_matches($) { + my $page = shift; + return if $IkiWiki::pagesources{$page}; + @@ -183,9 +183,9 @@ Index: IkiWiki/Plugin/missingparents.pm + return $output; + } + return ""; -+} #}}} ++} + -+sub needsbuild ($) { #{{{ ++sub needsbuild ($) { + my $files=shift; + my @new; + @@ -209,7 +209,7 @@ Index: IkiWiki/Plugin/missingparents.pm + $ownfiles{$file} = 1; + push @{$files}, $file; + } -+} #}}} ++} + +1 Index: IkiWiki.pm @@ -227,18 +227,18 @@ Index: IkiWiki.pm our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE @@ -330,6 +336,30 @@ error("failed renaming $newfile to $destdir/$file: $!", $cleanup); - } #}}} + } -+sub newpage($$) { #{{{ ++sub newpage($$) { + my $file=shift; + my $page=shift; + + $pagemtime{$page} = $pagectime{$page} = time; + $pagesources{$page} = $file; + $pagecase{lc $page} = $page; -+} #}}} ++} + -+sub delpage($) { #{{{ ++sub delpage($) { + my $page=shift; + $links{$page}=[]; + 
$renderedfiles{$page}=[]; @@ -251,10 +251,10 @@ Index: IkiWiki.pm + delete $destsources{$_}; + } + } -+} #}}} ++} + my %cleared; - sub will_render ($$;$) { #{{{ + sub will_render ($$;$) { my $page=shift; </pre> diff --git a/doc/todo/modify_page_filename_in_plugin.mdwn b/doc/todo/modify_page_filename_in_plugin.mdwn index 7c0a909eb..4099487a1 100644 --- a/doc/todo/modify_page_filename_in_plugin.mdwn +++ b/doc/todo/modify_page_filename_in_plugin.mdwn @@ -10,7 +10,7 @@ My solution is to allow plugins to provide a hook that sets the pagename. --[[/u +++ /usr/share/perl5/IkiWiki.pm 2008-10-07 11:57:26.000000000 -0400 @@ -196,11 +196,32 @@ - sub pagename ($) { #{{{ + sub pagename ($) { my $file=shift; my $type=pagetype($file); @@ -27,7 +27,7 @@ My solution is to allow plugins to provide a hook that sets the pagename. --[[/u $page=~s/\Q.$type\E*$// if defined $type; return $page; + } - } #}}} + } - sub htmlpage ($) { #{{{ + sub htmlpage ($) { diff --git a/doc/todo/need_global_renamepage_hook.mdwn b/doc/todo/need_global_renamepage_hook.mdwn new file mode 100644 index 000000000..8265497ae --- /dev/null +++ b/doc/todo/need_global_renamepage_hook.mdwn @@ -0,0 +1,53 @@ +As documented in [[plugins/write]], the current `renamepage` hook is +heavily oriented towards updating links in pages' content: it is run +once per page linking to the renamed page. + +That's fine, but it can't be used to trigger more general actions on +page rename. E.g. it won't be run at all if the page being renamed is +an orphan one. + +This is a real issue for the [[plugins/contrib/po]] development: what +I'm about to achieve is: + +- when a master page is renamed, the plugin takes notice of it (using + the `rename` hook), and later renames the translation pages + accordingly (in the `change` hook) +- when a master page is deleted, the plugin deletes its translations + (using the `delete` hook) + +With the current `renamepage` hook behavior, combining these two goals +has an annoying drawback: a plugin can't notice an orphan master page +has been renamed, so instead of renaming (and preserving) its +translations, it considers the oldpage as deleted, and deletes its +translations. Game over. + +It may seem like a corner case, but I want to be very careful when +deleting files automatically in `srcdir`, which is not always under +version control. + +As an sad workaround, I can still disable any deletion in `srcdir` +when it is not under version control. But I think ikiwiki deserves +a global `renamepage` hook that would be run once per rename +operation. + +My proposal is thus: + +- keep the documented `renamepage` hook as it is +- use something inspired by the trick `preprocess` uses: when `hook` + is passed an optional "global" parameter, set to a true value, the + declared `renamepage` hook is run once per rename operation, and is + passed named parameters: `src`, `srcfile`, `dest` and `destfile`. + +I'm of course volunteering to implement this, or anything related that +would solve my problem. Hmmm? --[[intrigeri]] + +> I think it would be better to have a different hook that is called for +> renames, since the two hook actions are very different (unlike the +> preprocess hook, which does a very similar thing in scan mode). +> +> Just calling it `rename` seems like a reasonable name, by analogy with +> the `delete` and `change` hooks. +> +> It might make sense to rename `renamepage` to `renamelink` to make it +> clearer what it does. (I'm not very worried about this breaking things, at +> this point.) 
--[[Joey]] diff --git a/doc/todo/pagespec_relative_to_a_target.mdwn b/doc/todo/pagespec_relative_to_a_target.mdwn index f7b248670..4757988e0 100644 --- a/doc/todo/pagespec_relative_to_a_target.mdwn +++ b/doc/todo/pagespec_relative_to_a_target.mdwn @@ -57,7 +57,7 @@ diff -urNX ignorepats ikiwiki/IkiWiki/Plugin/relative.pm ikidev/IkiWiki/Plugin/r + +package IkiWiki::PageSpec; + -+sub match_relative($$;@) { #{{{ ++sub match_relative($$;@) { + my $parent = shift; + my $spec = shift; + my %params = @_; @@ -69,21 +69,21 @@ diff -urNX ignorepats ikiwiki/IkiWiki/Plugin/relative.pm ikidev/IkiWiki/Plugin/r + } + } + return IkiWiki::FailReason->new("$parent can't match $spec against anything"); -+} #}}} ++} + -+sub match_has_child($$;@) { #{{{ ++sub match_has_child($$;@) { + my $page = shift; + my $childname = shift; + my $spec; -+ if ($childname) { #{{{ ++ if ($childname) { + $spec = "$page/$childname or $page/*/$childname"; -+ } #}}} -+ else { #{{{ ++ } ++ else { + $spec = "$page/*"; -+ } #}}} ++ } + + return match_relative($page, $spec, @_); -+} #}}} ++} + +1 </pre> diff --git a/doc/todo/provide_sha1_for_git_diffurl.mdwn b/doc/todo/provide_sha1_for_git_diffurl.mdwn index 9c8b340de..01aa512f8 100644 --- a/doc/todo/provide_sha1_for_git_diffurl.mdwn +++ b/doc/todo/provide_sha1_for_git_diffurl.mdwn @@ -10,7 +10,7 @@ diffurls of the following form: index 5bef928..164210d 100644 --- a/IkiWiki/Plugin/git.pm +++ b/IkiWiki/Plugin/git.pm - @@ -518,6 +518,7 @@ sub rcs_recentchanges ($) { #{{{ + @@ -518,6 +518,7 @@ sub rcs_recentchanges ($) { my $diffurl = defined $config{'diffurl'} ? $config{'diffurl'} : ""; $diffurl =~ s/\[\[file\]\]/$file/go; diff --git a/doc/todo/replace_HTML::Template_with_Template_Toolkit.mdwn b/doc/todo/replace_HTML::Template_with_Template_Toolkit.mdwn index dfeacbabd..3b9f6c0fd 100644 --- a/doc/todo/replace_HTML::Template_with_Template_Toolkit.mdwn +++ b/doc/todo/replace_HTML::Template_with_Template_Toolkit.mdwn @@ -54,3 +54,5 @@ the templates. I'd prefer not having to touch Perl though... ----- Yes, Template::Toolkit is very powerful. But I think it's somehow overkill for a wiki. HTML::Template can keep things simple, though. --[weakish](http://weakish.int.eu.org/blog/) + +I'd have to agree that Template::Toolkit is overkill and personally I'm not a fan, but it is very popular (there is even a book) and the new version (3) is alleged to be much more nimble than current version. --[[ajt]] diff --git a/doc/todo/require_CAPTCHA_to_edit.mdwn b/doc/todo/require_CAPTCHA_to_edit.mdwn index 110b4167f..83ba07eb0 100644 --- a/doc/todo/require_CAPTCHA_to_edit.mdwn +++ b/doc/todo/require_CAPTCHA_to_edit.mdwn @@ -91,15 +91,15 @@ ignored. --- a/IkiWiki/Plugin/openid.pm +++ b/IkiWiki/Plugin/openid.pm -@@ -18,6 +18,7 @@ sub getopt () { #{{{ +@@ -18,6 +18,7 @@ sub getopt () { error($@) if $@; Getopt::Long::Configure('pass_through'); GetOptions("openidsignup=s" => \$config{openidsignup}); + GetOptions("openidneedscaptcha=s" => \$config{openidneedscaptcha}); - } #}}} + } - sub formbuilder_setup (@) { #{{{ -@@ -61,6 +62,7 @@ sub formbuilder_setup (@) { #{{{ + sub formbuilder_setup (@) { +@@ -61,6 +62,7 @@ sub formbuilder_setup (@) { # Skip all other required fields in this case. foreach my $field ($form->field) { next if $field eq "openid_url"; @@ -107,7 +107,7 @@ ignored. 
$form->field(name => $field, required => 0, validate => '/.*/'); } -@@ -96,6 +98,18 @@ sub validate ($$$;$) { #{{{ +@@ -96,6 +98,18 @@ sub validate ($$$;$) { } } @@ -152,19 +152,19 @@ use warnings; use strict; use IkiWiki 2.00; -sub import { #{{{ +sub import { hook(type => "formbuilder_setup", id => "recaptcha", call => \&formbuilder_setup); -} # }}} +} -sub getopt () { #{{{ +sub getopt () { eval q{use Getopt::Long}; error($@) if $@; Getopt::Long::Configure('pass_through'); GetOptions("reCaptchaPubKey=s" => \$config{reCaptchaPubKey}); GetOptions("reCaptchaPrivKey=s" => \$config{reCaptchaPrivKey}); -} #}}} +} -sub formbuilder_setup (@) { #{{{ +sub formbuilder_setup (@) { my %params=@_; my $form=$params{form}; @@ -274,7 +274,7 @@ EOTAGS }); } } -} # }}} +} # The following function is borrowed from # Captcha::reCAPTCHA by Andy Armstrong and are under the PERL Artistic License diff --git a/doc/todo/source_link.mdwn b/doc/todo/source_link.mdwn index 93791c81a..b051361a8 100644 --- a/doc/todo/source_link.mdwn +++ b/doc/todo/source_link.mdwn @@ -1,6 +1,6 @@ How about a direct link from the page header to the source of the latest version, to avoid the need to either use edit or navigate to the current version via the history link? - I'd like this too (and might try to implement it). -- [[jondowland]] + I'd like this too (and might try to implement it). -- [[users/jon]] I just implemented this. There is one [[patch]] to the default page template, and a new plugin. -- [[Will]] @@ -31,13 +31,13 @@ I just implemented this. There is one [[patch]] to the default page template, a use IkiWiki; use open qw{:utf8 :std}; - sub import { #{{{ + sub import { hook(type => "getsetup", id => "getsource", call => \&getsetup); hook(type => "pagetemplate", id => "getsource", call => \&pagetemplate); hook(type => "sessioncgi", id => "getsource", call => \&cgi_getsource); - } # }}} + } - sub getsetup () { #{{{ + sub getsetup () { return plugin => { safe => 1, @@ -50,9 +50,9 @@ I just implemented this. There is one [[patch]] to the default page template, a safe => 1, rebuild => 0, }, - } #}}} + } - sub pagetemplate (@) { #{{{ + sub pagetemplate (@) { my %params=@_; my $page=$params{page}; @@ -62,9 +62,9 @@ I just implemented this. There is one [[patch]] to the default page template, a $template->param(getsourceurl => IkiWiki::cgiurl(do => "getsource", page => $page)); $template->param(have_actions => 1); } - } # }}} + } - sub cgi_getsource ($$) { #{{{ + sub cgi_getsource ($$) { my $cgi=shift; my $session=shift; diff --git a/doc/todo/structured_page_data.mdwn b/doc/todo/structured_page_data.mdwn index 2a196ed23..22f67cc0a 100644 --- a/doc/todo/structured_page_data.mdwn +++ b/doc/todo/structured_page_data.mdwn @@ -257,21 +257,21 @@ in a large number of other cases. use CGI::FormBuilder; use IkiWiki 2.00; - sub import { #{{{ + sub import { hook(type => "getsetup", id => "form", call => \&getsetup); hook(type => "htmlize", id => "form", call => \&htmlize); hook(type => "sessioncgi", id => "form", call => \&cgi_submit); - } # }}} + } - sub getsetup () { #{{{ + sub getsetup () { return plugin => { safe => 1, rebuild => 1, # format plugin }, - } #}}} + } - sub makeFormFromYAML ($$$) { #{{{ + sub makeFormFromYAML ($$$) { my $page = shift; my $YAMLString = shift; my $q = shift; @@ -350,9 +350,9 @@ in a large number of other cases. 
# IkiWiki::decode_form_utf8($form); return $form; - } #}}} + } - sub htmlize (@) { #{{{ + sub htmlize (@) { my %params=@_; my $content = $params{content}; my $page = $params{page}; @@ -360,9 +360,9 @@ in a large number of other cases. my $form = makeFormFromYAML($page, $content, undef); return $form->render(submit => 'Update Form'); - } # }}} + } - sub cgi_submit ($$) { #{{{ + sub cgi_submit ($$) { my $q=shift; my $session=shift; @@ -425,11 +425,11 @@ in a large number of other cases. } exit; - } #}}} + } package IkiWiki::PageSpec; - sub match_form_eq ($$;@) { #{{{ + sub match_form_eq ($$;@) { my $page=shift; my $argSet=shift; my @args=split(/,/, $argSet); @@ -460,7 +460,7 @@ in a large number of other cases. } else { return IkiWiki::FailReason->new("field value does not match"); } - } #}}} + } 1 @@ -476,22 +476,22 @@ in a large number of other cases. my $inTable = 0; - sub import { #{{{ + sub import { hook(type => "getsetup", id => "data", call => \&getsetup); hook(type => "needsbuild", id => "data", call => \&needsbuild); hook(type => "preprocess", id => "data", call => \&preprocess, scan => 1); hook(type => "preprocess", id => "datatable", call => \&preprocess_table, scan => 1); # does this need scan? - } # }}} + } - sub getsetup () { #{{{ + sub getsetup () { return plugin => { safe => 1, rebuild => 1, # format plugin }, - } #}}} + } - sub needsbuild (@) { #{{{ + sub needsbuild (@) { my $needsbuild=shift; foreach my $page (keys %pagestate) { if (exists $pagestate{$page}{data}) { @@ -506,7 +506,7 @@ in a large number of other cases. } } - sub preprocess (@) { #{{{ + sub preprocess (@) { my @argslist = @_; my %params=@argslist; @@ -546,9 +546,9 @@ in a large number of other cases. } return $html; - } # }}} + } - sub preprocess_table (@) { #{{{ + sub preprocess_table (@) { my %params=@_; my @lines; @@ -568,11 +568,11 @@ in a large number of other cases. push @lines, '</table>'; return join("\n", @lines); - } #}}} + } package IkiWiki::PageSpec; - sub match_data_eq ($$;@) { #{{{ + sub match_data_eq ($$;@) { my $page=shift; my $argSet=shift; my @args=split(/,/, $argSet); @@ -592,9 +592,9 @@ in a large number of other cases. } else { return IkiWiki::FailReason->new("value does not match"); } - } #}}} + } - sub match_data_link ($$;@) { #{{{ + sub match_data_link ($$;@) { my $page=shift; my $argSet=shift; my @params=@_; @@ -618,6 +618,6 @@ in a large number of other cases. } return IkiWiki::FailReason->new("No data link on page $page with key $key matches glob $value"); - } #}}} + } 1 diff --git a/doc/todo/support_creole_markup.mdwn b/doc/todo/support_creole_markup.mdwn index b0ebf5b9e..5a1e1286d 100644 --- a/doc/todo/support_creole_markup.mdwn +++ b/doc/todo/support_creole_markup.mdwn @@ -12,7 +12,7 @@ And there is a perl module: Text::WikiCreole Syntax file for vim: http://www.peter-hoffmann.com/code/vim/ (Since a typical ikiwiki user usually use external editors. :)) -> Should be pretty easy to add a plugin to do it using [[cpan +> Should be pretty easy to add a plugin to do it using [[!cpan > Text::WikiCreole]]. 
--[[Joey]] [[done]] diff --git a/doc/todo/supporting_comments_via_disussion_pages.mdwn b/doc/todo/supporting_comments_via_disussion_pages.mdwn index e0495c8c2..aae0b3008 100644 --- a/doc/todo/supporting_comments_via_disussion_pages.mdwn +++ b/doc/todo/supporting_comments_via_disussion_pages.mdwn @@ -91,14 +91,14 @@ Each comment is processed to something like this: use strict; use IkiWiki '1.02'; - sub import { #{{{ + sub import { hook(type => "formbuilder_setup", id => "comments", call => \&formbuilder_setup); hook(type => "preprocess", id => "blogcomment", call => \&preprocess); - } # }}} + } - sub formbuilder_setup (@) { #{{{ + sub formbuilder_setup (@) { my %params=@_; my $cgi = $params{cgi}; my $form = $params{form}; @@ -138,9 +138,9 @@ Each comment is processed to something like this: $content.=qq{[[!blogcomment from="""$name""" timestamp="""$timestamp""" subject="""$subject""" text="""$comment"""]]\n\n}; $content=~s/\n/\r\n/g; $form->field(name => "editcontent", value => $content, force => 1); - } # }}} + } - sub preprocess (@) { #{{{ + sub preprocess (@) { my %params=@_; my ($text, $date, $from, $subject, $r); @@ -159,7 +159,7 @@ Each comment is processed to something like this: $r .= "</dl>\n" . $text . "</div>\n"; return $r; - } # }}} + } 1; @@ -213,3 +213,8 @@ do you think so far? Known issues include: un-wikiish). --[[smcv]] + +I've updated smcvpostcomment and publicised it as [[plugins/contrib/comments]]. --[[smcv]] + +> While there is still room for improvement and entirely other approaches, +> I am calling this done since smcv's comments plugin is ready. --[[Joey]] diff --git a/doc/todo/syntax_highlighting.mdwn b/doc/todo/syntax_highlighting.mdwn index 2bdeb62be..d9a791c6f 100644 --- a/doc/todo/syntax_highlighting.mdwn +++ b/doc/todo/syntax_highlighting.mdwn @@ -7,16 +7,16 @@ pages, as well as doing syntax highlighting as a preprocessor directive ## The big list of possibilities -* [[plugins/contrib/highlightcode]] uses [[cpan Syntax::Highlight::Engine::Kate]], +* [[plugins/contrib/highlightcode]] uses [[!cpan Syntax::Highlight::Engine::Kate]], operates on whole source files only, has a few bugs (see [here](http://u32.net/Highlight_Code_Plugin/), and needs to be updated to support [[bugs/multiple_pages_with_same_name]]. -* [[cpan IkiWiki-Plugin-syntax]] only operates as a directive. +* [[!cpan IkiWiki-Plugin-syntax]] only operates as a directive. Interestingly, it supports multiple highlighting backends, including Kate and Vim. * [[plugins/contrib/syntax]] only operates as a directive ([[not_on_source_code_files|automatic_use_of_syntax_plugin_on_source_code_files]]), - and uses [[cpan Text::VimColor]]. + and uses [[!cpan Text::VimColor]]. * [[plugins/contrib/sourcehighlight]] uses src-highlight, and operates on whole source files only. Needs to be updated to support [[bugs/multiple_pages_with_same_name]]. @@ -43,7 +43,7 @@ pages, as well as doing syntax highlighting as a preprocessor directive inside source files. Doing this probably means post-processing the results of the highlighting engine, to find places where it's highlighted comments, and then running them through the ikiwiki rendering pipeline. - This seems fairly doable with [[cpan Syntax::Highlight::Engine::Kate]], + This seems fairly doable with [[!cpan Syntax::Highlight::Engine::Kate]], at least. 
* The whole-file plugins tend to have a problem that things that look like wikilinks in the source code get munged into links by ikiwiki, which can @@ -90,7 +90,7 @@ like this: index 8d728c9..1bd46a9 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -618,6 +618,8 @@ sub pagetype ($) { #{{{ + @@ -618,6 +618,8 @@ sub pagetype ($) { if ($page =~ /\.([^.]+)$/) { return $1 if exists $hooks{htmlize}{$1}; @@ -98,7 +98,7 @@ like this: + return $page; } return; - } #}}} + } ## format directive diff --git a/doc/todo/tidy_git__39__s_ctime_debug_output.mdwn b/doc/todo/tidy_git__39__s_ctime_debug_output.mdwn index aaa040ec7..bfc130d69 100644 --- a/doc/todo/tidy_git__39__s_ctime_debug_output.mdwn +++ b/doc/todo/tidy_git__39__s_ctime_debug_output.mdwn @@ -10,6 +10,6 @@ + debug("ctime for '$file': ". localtime($ctime)); return $ctime; - } #}}} + } [[!tag patch done]] diff --git a/doc/todo/tmplvars_plugin.mdwn b/doc/todo/tmplvars_plugin.mdwn index f7d06a579..644cf23aa 100644 --- a/doc/todo/tmplvars_plugin.mdwn +++ b/doc/todo/tmplvars_plugin.mdwn @@ -11,12 +11,12 @@ A simple plugin to allow per-page customization of a template by passing paramat my %tmplvars; - sub import { #{{{ + sub import { hook(type => "preprocess", id => "tmplvars", call => \&preprocess); hook(type => "pagetemplate", id => "tmplvars", call => \&pagetemplate); - } # }}} + } - sub preprocess (@) { #{{{ + sub preprocess (@) { my %params=@_; if ($params{page} eq $params{destpage}) { @@ -34,9 +34,9 @@ A simple plugin to allow per-page customization of a template by passing paramat } } - } # }}} + } - sub pagetemplate (@) { #{{{ + sub pagetemplate (@) { my %params=@_; my $template = $params{template}; @@ -47,6 +47,6 @@ A simple plugin to allow per-page customization of a template by passing paramat } return undef; - } # }}} + } 1 diff --git a/doc/todo/toc_plugin:_set_a_header_ceiling___40__opposite_of_levels__61____41__.mdwn b/doc/todo/toc_plugin:_set_a_header_ceiling___40__opposite_of_levels__61____41__.mdwn new file mode 100644 index 000000000..547c7a80a --- /dev/null +++ b/doc/todo/toc_plugin:_set_a_header_ceiling___40__opposite_of_levels__61____41__.mdwn @@ -0,0 +1,3 @@ +It would be nice if the [[plugins/toc]] plugin let you specify a header level "ceiling" above which (or above and including which) the headers would not be incorporated into the toc. + +Currently, the levels=X parameter lets you tweak how deep it will go for small headers, but I'd like to chop off the h1's (as I use them for my page title) -- [[Jon]] diff --git a/doc/todo/tracking_bugs_with_dependencies.mdwn b/doc/todo/tracking_bugs_with_dependencies.mdwn index 3af0458bd..2832e37aa 100644 --- a/doc/todo/tracking_bugs_with_dependencies.mdwn +++ b/doc/todo/tracking_bugs_with_dependencies.mdwn @@ -194,9 +194,9 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W index 4e4da11..8b3cdfe 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm - @@ -1550,7 +1550,16 @@ sub globlist_to_pagespec ($) { #{{{ + @@ -1550,7 +1550,16 @@ sub globlist_to_pagespec ($) { - sub is_globlist ($) { #{{{ + sub is_globlist ($) { my $s=shift; - return ( $s =~ /[^\s]+\s+([^\s]+)/ && $1 ne "and" && $1 ne "or" ); + return ! ($s =~ / @@ -209,19 +209,19 @@ account all comments above (which doesn't mean it is above reproach :) ). 
--[[W + ) | + (\s and \s) | (\s or \s) # or we find 'and' or 'or' somewhere + /xs); - } #}}} + } - sub safequote ($) { #{{{ - @@ -1631,7 +1640,7 @@ sub pagespec_merge ($$) { #{{{ + sub safequote ($) { + @@ -1631,7 +1640,7 @@ sub pagespec_merge ($$) { return "($a) or ($b)"; - } #}}} + } - -sub pagespec_translate ($) { #{{{ - +sub pagespec_makeperl ($) { #{{{ + -sub pagespec_translate ($) { + +sub pagespec_makeperl ($) { my $spec=shift; # Support for old-style GlobLists. - @@ -1650,12 +1659,14 @@ sub pagespec_translate ($) { #{{{ + @@ -1650,12 +1659,14 @@ sub pagespec_translate ($) { | \) # ) | @@ -238,7 +238,7 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W my $word=$1; if (lc $word eq 'and') { $code.=' &&'; - @@ -1666,16 +1677,23 @@ sub pagespec_translate ($) { #{{{ + @@ -1666,16 +1677,23 @@ sub pagespec_translate ($) { elsif ($word eq "(" || $word eq ")" || $word eq "!") { $code.=' '.$word; } @@ -265,14 +265,14 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W } } - @@ -1683,8 +1701,18 @@ sub pagespec_translate ($) { #{{{ + @@ -1683,8 +1701,18 @@ sub pagespec_translate ($) { $code=0; } + return 'sub { my $page=shift; my %params = @_; '.$code.' }'; - +} #}}} + +} + - +sub pagespec_translate ($) { #{{{ + +sub pagespec_translate ($) { + my $spec=shift; + + my $code = pagespec_makeperl($spec); @@ -282,19 +282,19 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W no warnings; - return eval 'sub { my $page=shift; '.$code.' }'; + return eval $code; - } #}}} + } - sub pagespec_match ($$;@) { #{{{ - @@ -1699,7 +1727,7 @@ sub pagespec_match ($$;@) { #{{{ + sub pagespec_match ($$;@) { + @@ -1699,7 +1727,7 @@ sub pagespec_match ($$;@) { my $sub=pagespec_translate($spec); return IkiWiki::FailReason->new("syntax error in pagespec \"$spec\"") if $@; - return $sub->($page, @params); + return $sub->($page, @params, specFuncs => {}); - } #}}} + } - sub pagespec_valid ($) { #{{{ - @@ -1748,11 +1776,78 @@ sub new { #{{{ + sub pagespec_valid ($) { + @@ -1748,11 +1776,78 @@ sub new { package IkiWiki::PageSpec; @@ -361,7 +361,7 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W + } +} + - sub match_glob ($$;@) { #{{{ + sub match_glob ($$;@) { my $page=shift; my $glob=shift; my %params=@_; @@ -373,9 +373,9 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W my $from=exists $params{location} ? $params{location} : ''; # relative matching - @@ -1782,11 +1877,12 @@ sub match_internal ($$;@) { #{{{ + @@ -1782,11 +1877,12 @@ sub match_internal ($$;@) { - sub match_link ($$;@) { #{{{ + sub match_link ($$;@) { my $page=shift; - my $link=lc(shift); + my $fulllink=shift; @@ -388,7 +388,7 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W # relative matching if ($link =~ m!^\.! && defined $from) { $from=~s#/?[^/]+$##; - @@ -1804,19 +1900,32 @@ sub match_link ($$;@) { #{{{ + @@ -1804,19 +1900,32 @@ sub match_link ($$;@) { } else { return IkiWiki::SuccessReason->new("$page links to page $p matching $link") @@ -397,9 +397,9 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W } } return IkiWiki::FailReason->new("$page does not link to $link"); - } #}}} + } - sub match_backlink ($$;@) { #{{{ + sub match_backlink ($$;@) { - return match_link($_[1], $_[0], @_); + my $page=shift; + my $backlink=shift; @@ -410,9 +410,9 @@ account all comments above (which doesn't mean it is above reproach :) ). 
--[[W + } + + return match_link($backlink, $page, @params); - } #}}} + } - sub match_created_before ($$;@) { #{{{ + sub match_created_before ($$;@) { my $page=shift; my $testpage=shift; + my @params=@_; @@ -423,8 +423,8 @@ account all comments above (which doesn't mean it is above reproach :) ). --[[W if (exists $IkiWiki::pagectime{$testpage}) { if ($IkiWiki::pagectime{$page} < $IkiWiki::pagectime{$testpage}) { - @@ -1834,6 +1943,11 @@ sub match_created_before ($$;@) { #{{{ - sub match_created_after ($$;@) { #{{{ + @@ -1834,6 +1943,11 @@ sub match_created_before ($$;@) { + sub match_created_after ($$;@) { my $page=shift; my $testpage=shift; + my @params=@_; diff --git a/doc/todo/turn_edittemplate_verbosity_off_by_default.mdwn b/doc/todo/turn_edittemplate_verbosity_off_by_default.mdwn index 87e55685c..14bb43782 100644 --- a/doc/todo/turn_edittemplate_verbosity_off_by_default.mdwn +++ b/doc/todo/turn_edittemplate_verbosity_off_by_default.mdwn @@ -8,7 +8,7 @@ I think this (untested) patch might just do the trick: --- a/IkiWiki/Plugin/edittemplate.pm +++ b/IkiWiki/Plugin/edittemplate.pm - @@ -46,8 +46,13 @@ sub preprocess (@) { #{{{ + @@ -46,8 +46,13 @@ sub preprocess (@) { $pagestate{$params{page}}{edittemplate}{$params{match}}=$params{template}; @@ -21,9 +21,9 @@ I think this (untested) patch might just do the trick: + else { + return ''; + } - } # }}} + } - sub formbuilder (@) { #{{{ + sub formbuilder (@) { --[[madduck]] diff --git a/doc/todo/using_meta_titles_for_parentlinks.html b/doc/todo/using_meta_titles_for_parentlinks.html index d04e5a300..6da40a415 100644 --- a/doc/todo/using_meta_titles_for_parentlinks.html +++ b/doc/todo/using_meta_titles_for_parentlinks.html @@ -82,9 +82,9 @@ diff -c /usr/share/perl5/IkiWiki/Plugin/meta.pm.distrib /usr/share/perl5/IkiWiki *** 289,294 **** --- 290,319 ---- } - } #}}} + } -+ sub IkiWiki::pagetitle ($;$) { #{{{ ++ sub IkiWiki::pagetitle ($;$) { + my $page=shift; + my $unescaped=shift; + @@ -106,11 +106,11 @@ diff -c /usr/share/perl5/IkiWiki/Plugin/meta.pm.distrib /usr/share/perl5/IkiWiki + } + + return $page; -+ } #}}} ++ } + package IkiWiki::PageSpec; - sub match_title ($$;@) { #{{{ + sub match_title ($$;@) { </pre> diff --git a/doc/todo/varioki_--_add_template_variables___40__with_closures_for_values__41___in_ikiwiki.setup.mdwn b/doc/todo/varioki_--_add_template_variables___40__with_closures_for_values__41___in_ikiwiki.setup.mdwn index 492a32b36..b28469993 100644 --- a/doc/todo/varioki_--_add_template_variables___40__with_closures_for_values__41___in_ikiwiki.setup.mdwn +++ b/doc/todo/varioki_--_add_template_variables___40__with_closures_for_values__41___in_ikiwiki.setup.mdwn @@ -157,9 +157,9 @@ ManojSrivastava +=cut + + -+sub import { #{{{ ++sub import { + hook(type => "pagetemplate", id => "varioki", call => \&pagetemplate); -+} # }}} ++} + + +=pod @@ -175,7 +175,7 @@ ManojSrivastava + +=cut + -+sub pagetemplate (@) { #{{{ ++sub pagetemplate (@) { + my %params=@_; + my $page=$params{page}; + my $template=$params{template}; @@ -207,7 +207,7 @@ ManojSrivastava + $template->param("$var" =>"$value"); + } + } -+} # }}} ++} + +1; + diff --git a/doc/users/StevenBlack.mdwn b/doc/users/StevenBlack.mdwn new file mode 100644 index 000000000..ea7a6a97a --- /dev/null +++ b/doc/users/StevenBlack.mdwn @@ -0,0 +1,5 @@ +It feels like there are a lot of people named Steven Black. While I'm just one of many with my name, sometimes it is actually just me and I've forgotten that I had an account somewhere. 
+ +I'm not a doctor, though I would certainly trust any doctor, dentist, or philosopher named Steven Black. (There are several.) + +I *am* a huge Ikiwiki fan. I've had my eye on it for many years for personal projects (though I never quite got around to installing it). Recently, however, I managed to convince my coworkers that it would be a good idea for an internal wiki. Boy was I right. The thing is practically designed to be the perfect developer-centered wiki. diff --git a/doc/users/ajt.mdwn b/doc/users/ajt.mdwn new file mode 100644 index 000000000..bc47040b6 --- /dev/null +++ b/doc/users/ajt.mdwn @@ -0,0 +1,20 @@ +[[!meta title="Adam Trickett"]] + +# Adam Trickett + +## "ajt" + +I'm a long time hacker of sorts, I like to program in Perl on Debian systems but work pays me to program in ABAP (COBOL) on SAP. + +I like wikis and I'm currently in love with ikiwiki, having moved my home intranet from a home made template solution to ikiwiki over a weekend. I'm using ikiwiki more like a web content management system (e.g. RedDot) rather than a traditional wiki. + +### My Links + +* [iredale dot net](http://www.iredale.net/) my web server and main blog +* [ajt](http://www.perlmonks.org/index.pl?node_id=113686) my Perkmonks home node +* [ajt](http://use.perl.org/~ajt) my use Perl home +* [ATRICKETT](http://search.cpan.org/~atrickett/) my CPAN folder +* [ajt](http://www.debian-administration.org/users/ajt) my Debian-Administration home (good site btw) +* [drajt](http://www.linkedin.com/in/drajt) my LinkedIn profile +* [drajt](http://www.slideshare.net/drajt) my "Slidespace" on SlideShare +* [AdamTrickett](http://www.hants.lug.org.uk/cgi-bin/wiki.pl?AdamTrickett) my wiki page on my LUG's site diff --git a/doc/users/jondowland.mdwn b/doc/users/jon.mdwn index 8bfa3dd76..3e22ded1d 100644 --- a/doc/users/jondowland.mdwn +++ b/doc/users/jon.mdwn @@ -1,7 +1,10 @@ +[[!meta title="Jon Dowland"]] I'm looking at ikiwiki both for my personal site but also as a team-documentation management system for a small-sized group of UNIX sysadmins. +* my edits should appear either as 'Jon' (if I've used + [[tips/untrusted_git_push]]) or 'alcopop.org/me/openid/'. * My [homepage](http://jmtd.net/) is powered by ikiwiki (replacing my [older homepage](http://alcopop.org/), which was a mess of scripts) I gave a talk at the [UK UNIX User's Group](http://www.ukuug.org/) annual diff --git a/doc/users/smcv.mdwn b/doc/users/smcv.mdwn index 33ae450b2..c52aa8f0f 100644 --- a/doc/users/smcv.mdwn +++ b/doc/users/smcv.mdwn @@ -1 +1,10 @@ -I'm trying to add enough features/fix enough bugs to convert [smcv.pseudorandom.co.uk](http://smcv.pseudorandom.co.uk/) from Django + Python + misc hacks to ikiwiki. +Website: [pseudorandom.co.uk](http://www.pseudorandom.co.uk/) + +Blog: [smcv.pseudorandom.co.uk](http://smcv.pseudorandom.co.uk/) + +My repository containing ikiwiki branches: + +* gitweb: http://git.pseudorandom.co.uk/smcv/ikiwiki.git +* anongit: git://git.pseudorandom.co.uk/git/smcv/ikiwiki.git + +Currently working on the [[plugins/contrib/comments]] plugin. diff --git a/doc/users/weakishjiang.mdwn b/doc/users/weakishjiang.mdwn new file mode 100644 index 000000000..0cafb4653 --- /dev/null +++ b/doc/users/weakishjiang.mdwn @@ -0,0 +1,4 @@ +[My blog](http://millenniumdark.blog.ubuntu.org.cn) + +> So, you're learning haskell. You know, I want to add support for haskell +> external plugins to ikiwiki.. 
:-) --[[Joey]]
diff --git a/doc/users/xma.mdwn b/doc/users/xma.mdwn
index 97a8ef869..89f2ff74c 100644
--- a/doc/users/xma.mdwn
+++ b/doc/users/xma.mdwn
@@ -9,7 +9,7 @@ Anyway, [[ikiwiki]] is really *awesome* !
 
 ## More about me
 
-I am CLI user living in the linux console. More precisely, I live in an [[GNU Emacs]] frame all day long. My main computer is an EeePC 901 running Slackware GNU/Linux 12.1. I do not have X installed (too lazy) but when in X, I am running an instance of [[CLFSWM]].
+I am CLI user living in the linux console. More precisely, I live in an [[GNU_Emacs]] frame all day long. My main computer is an EeePC 901 running Slackware GNU/Linux 12.1. I do not have X installed (too lazy) but when in X, I am running an instance of [[CLFSWM]].
 
 ## Contacting me
diff --git a/doc/wikitemplates.mdwn b/doc/wikitemplates.mdwn
index babd70211..dc217cd30 100644
--- a/doc/wikitemplates.mdwn
+++ b/doc/wikitemplates.mdwn
@@ -29,6 +29,10 @@ located in /usr/share/ikiwiki/templates by default.
   form to wiki pages.
 * `searchquery.tmpl` - This is an omega template, used by the
   [[plugins/search]] plugin.
+* `comment.tmpl` - This template is used to display a comment
+  by the [[plugins/comments]] plugin.
+* `editcomment.tmpl` - This template is the comment post form for the
+  [[plugins/comments]] plugin.
 
 The [[plugins/pagetemplate]] plugin can allow individual pages to use a
 different template than `page.tmpl`.
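
The `pagetemplate` hook referenced in that last hunk is the same mechanism the plugins quoted throughout this commit (meta, tmplvars, varioki, getsource) use to feed values into these templates. A minimal sketch of the pattern, assuming a hypothetical `footemplate` plugin and a template containing a `<TMPL_VAR FOO>` placeholder — both invented for illustration, not part of any shipped plugin:

    #!/usr/bin/perl
    # Hypothetical sketch: "footemplate" and the "foo" template variable
    # are invented for illustration; this is not a shipped ikiwiki plugin.
    package IkiWiki::Plugin::footemplate;

    use warnings;
    use strict;
    use IkiWiki 2.00;

    sub import {
    	hook(type => "pagetemplate", id => "footemplate", call => \&pagetemplate);
    }

    sub pagetemplate (@) {
    	my %params=@_;
    	my $page=$params{page};
    	my $template=$params{template};

    	# Only set the parameter if the template being filled in
    	# actually contains a <TMPL_VAR FOO> placeholder.
    	if ($template->query(name => "foo")) {
    		$template->param(foo => "some value for $page");
    	}
    }

    1

Checking `$template->query` before calling `$template->param`, as the quoted meta and tmplvars patches do, keeps the hook a no-op for templates that never use the variable.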