summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--IkiWiki/CGI.pm35
-rw-r--r--debian/changelog5
2 files changed, 28 insertions, 12 deletions
diff --git a/IkiWiki/CGI.pm b/IkiWiki/CGI.pm
index 8b01ca3f8..3ba6cf7f8 100644
--- a/IkiWiki/CGI.pm
+++ b/IkiWiki/CGI.pm
@@ -116,7 +116,7 @@ sub cgi_signin ($$) { #{{{
eval q{use CGI::FormBuilder};
my $form = CGI::FormBuilder->new(
title => "signin",
- fields => [qw(do title page subpage from name password confirm_password email)],
+ fields => [qw(do title page subpage from name password)],
header => 1,
charset => "utf-8",
method => 'POST',
@@ -145,8 +145,14 @@ sub cgi_signin ($$) { #{{{
$form->field(name => "from", type => "hidden");
$form->field(name => "subpage", type => "hidden");
$form->field(name => "password", type => "password", required => 0);
- $form->field(name => "confirm_password", type => "password", required => 0);
- $form->field(name => "email", required => 0);
+ if ($form->submitted eq "Register" || $form->submitted eq "Create Account") {
+ $form->title("register");
+ $form->text("");
+ $form->field(name => "name", comment => "use FirstnameLastName");
+ $form->fields(qw(do title page subpage from name password confirm_password email));
+ $form->field(name => "confirm_password", type => "password");
+ $form->field(name => "email", type => "text");
+ }
if ($q->param("do") ne "signin" && !$form->submitted) {
$form->text("You need to log in first.");
}
@@ -155,7 +161,8 @@ sub cgi_signin ($$) { #{{{
# Set required fields based on how form was submitted.
my %required=(
"Login" => [qw(name password)],
- "Register" => [qw(name password confirm_password email)],
+ "Register" => [],
+ "Create Account" => [qw(name password confirm_password email)],
"Mail Password" => [qw(name)],
);
foreach my $opt (@{$required{$form->submitted}}) {
@@ -179,7 +186,7 @@ sub cgi_signin ($$) { #{{{
}
# And make sure the entered name exists when logging
# in or sending email, and does not when registering.
- if ($form->submitted eq 'Register') {
+ if ($form->submitted eq 'Create Account') {
$form->field(
name => "name",
validate => sub {
@@ -204,8 +211,6 @@ sub cgi_signin ($$) { #{{{
else {
# First time settings.
$form->field(name => "name", comment => "use FirstnameLastName");
- $form->field(name => "confirm_password", comment => "(only needed");
- $form->field(name => "email", comment => "for registration)");
if ($session->param("name")) {
$form->field(name => "name", value => $session->param("name"));
}
@@ -228,7 +233,7 @@ sub cgi_signin ($$) { #{{{
redirect($q, $config{url});
}
}
- elsif ($form->submitted eq 'Register') {
+ elsif ($form->submitted eq 'Create Account') {
my $user_name=$form->field('name');
if (userinfo_setall($user_name, {
'email' => $form->field('email'),
@@ -237,12 +242,12 @@ sub cgi_signin ($$) { #{{{
})) {
$form->field(name => "confirm_password", type => "hidden");
$form->field(name => "email", type => "hidden");
- $form->text("Registration successful. Now you can Login.");
+ $form->text("Account creation successful. Now you can Login.");
printheader($session);
print misctemplate($form->title, $form->render(submit => ["Login"]));
}
else {
- error("Error saving registration.");
+ error("Error creating account.");
}
}
elsif ($form->submitted eq 'Mail Password') {
@@ -267,9 +272,17 @@ sub cgi_signin ($$) { #{{{
$form->text("Your password has been emailed to you.");
$form->field(name => "name", required => 0);
printheader($session);
- print misctemplate($form->title, $form->render(submit => ["Login", "Register", "Mail Password"]));
+ print misctemplate($form->title, $form->render(submit => ["Login", "Mail Password"]));
+ }
+ elsif ($form->submitted eq "Register") {
+ printheader($session);
+ print misctemplate($form->title, $form->render(submit => ["Create Account"]));
}
}
+ elsif ($form->submitted eq "Create Account") {
+ printheader($session);
+ print misctemplate($form->title, $form->render(submit => ["Create Account"]));
+ }
else {
printheader($session);
print misctemplate($form->title, $form->render(submit => ["Login", "Register", "Mail Password"]));
diff --git a/debian/changelog b/debian/changelog
index 13f7ab794..5e01b4679 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -20,8 +20,11 @@ ikiwiki (1.32) UNRELEASED; urgency=low
them together.
* Install the source of the examples into /usr/share/doc/ikiwiki/examples.
* Add perlmagick to build-depends so syntax check of img plugin works.
+ * Improve login/register process, the login dialog has only name and
password fields, which allows more web browsers to recognise it as a login
+ field, and is less confusing.
- -- Joey Hess <joeyh@debian.org> Mon, 30 Oct 2006 14:30:54 -0500
+ -- Joey Hess <joeyh@debian.org> Mon, 30 Oct 2006 18:26:55 -0500
ikiwiki (1.31) unstable; urgency=low
">,
  • },
  • }
  • sub checkconfig () {
  • if (! defined $config{aggregateinternal}) {
  • $config{aggregateinternal}=1;
  • }
  • if ($config{aggregate} && ! ($config{post_commit} &&
  • IkiWiki::commit_hook_enabled())) {
  • launchaggregation();
  • }
  • }
  • sub cgi ($) {
  • my $cgi=shift;
  • if (defined $cgi->param('do') &&
  • $cgi->param("do") eq "aggregate_webtrigger") {
  • $|=1;
  • print "Content-Type: text/plain\n\n";
  • $config{cgi}=0;
  • $config{verbose}=1;
  • $config{syslog}=0;
  • print gettext("Aggregation triggered via web.")."\n\n";
  • if (launchaggregation()) {
  • IkiWiki::lockwiki();
  • IkiWiki::loadindex();
  • require IkiWiki::Render;
  • IkiWiki::refresh();
  • IkiWiki::saveindex();
  • }
  • else {
  • print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
  • }
  • exit 0;
  • }
  • }
# Run one aggregation pass if any feed is due. Returns 1 when
# aggregation ran, nothing when there was nothing to do or another
# aggregator holds the lock. Forks a child to do the fetching;
# lockaggregate()/unlockaggregate() (defined elsewhere in this file)
# presumably guard against concurrent aggregation runs.
sub launchaggregation () {
	# See if any feeds need aggregation.
	loadstate();
	my @feeds=needsaggregate();
	return unless @feeds;
	if (! lockaggregate()) {
		debug("an aggregation process is already running");
		return;
	}
	# force a later rebuild of source pages
	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
		foreach @feeds;
	# Fork a child process to handle the aggregation.
	# The parent process will then handle building the
	# result. This avoids messy code to clear state
	# accumulated while aggregating.
	defined(my $pid = fork) or error("Can't fork: $!");
	if (! $pid) {
		IkiWiki::loadindex();
		# Aggregation happens without the main wiki lock
		# being held. This allows editing pages etc while
		# aggregation is running.
		aggregate(@feeds);
		IkiWiki::lockwiki;
		# Merge changes, since aggregation state may have
		# changed on disk while the aggregation was happening.
		mergestate();
		expire();
		savestate();
		IkiWiki::unlockwiki;
		exit 0;
	}
	# Parent: wait for the child and propagate any failure.
	waitpid($pid,0);
	if ($?) {
		error "aggregation failed with code $?";
	}
	# The child wrote merged state to disk; drop our stale in-memory
	# copy so the next loadstate() rereads it.
	clearstate();
	unlockaggregate();
	return 1;
}
  • # Pages with extension _aggregated have plain html markup, pass through.
  • sub htmlize (@) {
  • my %params=@_;
  • return $params{content};
  • }
# Used by ikiwiki-transition aggregateinternal.
# One-shot migration: rename every aggregated page from its external
# filename to its internal one, and remove the old rendered output.
# htmlfn()'s result depends on $config{aggregateinternal}, which is
# toggled around each call to compute both names.
sub migrate_to_internal {
	if (! lockaggregate()) {
		error("an aggregation process is currently running");
	}
	IkiWiki::lockwiki();
	loadstate();
	$config{verbose}=1;
	foreach my $data (values %guids) {
		# Only guids with a live page need moving.
		next unless $data->{page};
		next if $data->{expired};
		$config{aggregateinternal} = 0;
		my $oldname = "$config{srcdir}/".htmlfn($data->{page});
		my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
		$config{aggregateinternal} = 1;
		my $newname = "$config{srcdir}/".htmlfn($data->{page});
		debug "moving $oldname -> $newname";
		if (-e $newname) {
			if (-e $oldname) {
				# Both exist: refuse to clobber.
				error("$newname already exists");
			}
			else {
				# Looks like a previous (partial) run did this one.
				debug("already renamed to $newname?");
			}
		}
		elsif (-e $oldname) {
			rename($oldname, $newname) || error("$!");
		}
		else {
			debug("$oldname not found");
		}
		# The externally-named rendered page is now stale.
		if (-e $oldoutput) {
			require IkiWiki::Render;
			debug("removing output file $oldoutput");
			IkiWiki::prune($oldoutput);
		}
	}
	savestate();
	IkiWiki::unlockwiki;
	unlockaggregate();
}
  • sub needsbuild (@) {
  • my $needsbuild=shift;
  • loadstate();
  • foreach my $feed (values %feeds) {
  • if (exists $pagesources{$feed->{sourcepage}} &&
  • grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
  • # Mark all feeds originating on this page as
  • # not yet seen; preprocess will unmark those that
  • # still exist.
  • markunseen($feed->{sourcepage});
  • }
  • }
  • }
# Preprocessor for the [[!aggregate ]] directive: record (or update)
# the feed's settings in %feeds and return a short HTML status
# fragment for display on the wiki page.
sub preprocess (@) {
	my %params=@_;

	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			error sprintf(gettext("missing %s parameter"), $required)
		}
	}

	my $feed={};
	my $name=$params{name};
	# Re-use any existing record so accumulated state (numposts,
	# lastupdate, ...) survives a rebuild of the source page.
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
	$feed->{name}=$name;
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	# Aggregated items land under dir; default is page/feedname,
	# sanitized against the wiki's filename rules.
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
	$dir=~s/^\/+//;
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{dir}=$dir;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	# updateinterval is given in minutes; stored in seconds.
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	# Restrict the template name to safe characters.
	if (exists $params{template}) {
		$params{template}=~s/[^-_a-zA-Z0-9]+//g;
	}
	else {
		$params{template} = "aggregatepost"
	}
	$feed->{template}=$params{template} . ".tmpl";
	# The feed was seen on its source page; see markunseen()/needsbuild().
	delete $feed->{unseen};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
	# tag may be given more than once; %params collapses duplicate
	# keys, so walk the raw argument list to collect them all.
	$feed->{tags}=[];
	while (@_) {
		my $key=shift;
		my $value=shift;
		if ($key eq 'tag') {
			push @{$feed->{tags}}, $value;
		}
	}

	# Status line: feed link, last message (emphasized on error),
	# post count, and number of new posts if any.
	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
		($feed->{error} ? "<em>" : "").$feed->{message}.
		($feed->{error} ? "</em>" : "").
		" (".$feed->{numposts}." ".gettext("posts").
		($feed->{newposts} ? "; ".$feed->{newposts}.
			" ".gettext("new") : "").
		")";
}
  • sub delete (@) {
  • my @files=@_;
  • # Remove feed data for removed pages.
  • foreach my $file (@files) {
  • my $page=pagename($file);
  • markunseen($page);
  • }
  • }
  • sub markunseen ($) {
  • my $page=shift;
  • foreach my $id (keys %feeds) {
  • if ($feeds{$id}->{sourcepage} eq $page) {
  • $feeds{$id}->{unseen}=1;
  • }
  • }
  • }
# True once loadstate() has read the aggregate state file; prevents
# reloading, and tells savestate() there is state worth writing.
my $state_loaded=0;
# Read feed and guid records from the aggregate state file into
# %feeds and %guids. Each line holds one record as space-separated
# field=value pairs; free-text fields are entity-encoded (the inverse
# of savestate()). A no-op after the first successful call.
sub loadstate () {
	return if $state_loaded;
	$state_loaded=1;
	if (-e "$config{wikistatedir}/aggregate") {
		open(IN, "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
		while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my $data={};
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					# Free text: decode the spaces, tabs and
					# newlines that savestate() encoded.
					$data->{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					# tag repeats; accumulate into a list.
					push @{$data->{tags}}, $val;
				}
				else {
					$data->{$field}=$val;
				}
			}

			# A name field marks a feed record; a guid field
			# marks a per-item record.
			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			}
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
			}
		}

		close IN;
	}
}
  • sub savestate () {
  • return unless $state_loaded;
  • garbage_collect();
  • my $newfile="$config{wikistatedir}/aggregate.new";
  • my $cleanup = sub { unlink($newfile) };
  • open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
  • foreach my $data (values %feeds, values %guids) {
  • my @line;
  • foreach my $field (keys %$data) {
  • if ($field eq "name" || $field eq "feed" ||
  • $field eq "guid" || $field eq "message") {
  • push @line, "$field=".encode_entities($data->{$field}, " \t\n");
  • }
  • elsif ($field eq "tags") {
  • push @line, "tag=$_" foreach @{$data->{tags}};
  • }
  • else {
  • push @line, "$field=".$data->{$field}
  • if defined $data->{$field};
  • }
  • }
  • print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
  • }
  • close OUT || error("save $newfile: $!", $cleanup);
  • rename($newfile, "$config{wikistatedir}/aggregate") ||
  • error("rename $newfile: $!", $cleanup);
  • }
  • sub garbage_collect () {
  • foreach my $name (keys %feeds) {
  • # remove any feeds that were not seen while building the pages
  • # that used to contain them
  • if ($feeds{$name}->{unseen}) {
  • delete $feeds{$name};
  • }
  • }
  • foreach my $guid (values %guids) {
  • # any guid whose feed is gone should be removed
  • if (! exists $feeds{$guid->{feed}}) {
  • unlink "$config{srcdir}/".htmlfn($guid->{page})
  • if exists $guid->{page};
  • delete $guids{$guid->{guid}};
  • }
  • # handle expired guids
  • elsif ($guid->{expired} && exists $guid->{page}) {
  • unlink "$config{srcdir}/".htmlfn($guid->{page});
  • delete $guid->{page};
  • delete $guid->{md5};
  • }
  • }
  • }
  • sub mergestate () {
  • # Load the current state in from disk, and merge into it
  • # values from the state in memory that might have changed
  • # during aggregation.
  • my %myfeeds=%feeds;
  • my %myguids=%guids;
  • clearstate();
  • loadstate();
  • # All that can change in feed state during aggregation is a few
  • # fields.
  • foreach my $name (keys %myfeeds) {
  • if (exists $feeds{$name}) {
  • foreach my $field (qw{message lastupdate lasttry
  • numposts newposts error}) {
  • $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
  • }
  • }
  • }
  • # New guids can be created during aggregation.
  • # It's also possible that guids were removed from the on-disk state
  • # while the aggregation was in process. That would only happen if
  • # their feed was also removed, so any removed guids added back here
  • # will be garbage collected later.
  • foreach my $guid (keys %myguids) {
  • if (! exists $guids{$guid}) {
  • $guids{$guid}=$myguids{$guid};
  • }
  • }
  • }
  • sub clearstate () {
  • %feeds=();
  • %guids=();
  • $state_loaded=0;
  • }
# Mark old aggregated items as expired, per feed, according to the
# feed's expireage (days) and/or expirecount (max items) settings.
# garbage_collect() later removes the pages of expired items.
sub expire () {
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		my $count=0;
		my %seen;
		# Walk this feed's items newest-first, by page ctime.
		foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
		                  values %guids) {
			if ($feed->{expireage}) {
				my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, int($days_old)));
					$item->{expired}=1;
				}
			}
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				# Already kept expirecount items; the rest expire.
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				$item->{expired}=1;
			}
			else {
				# Count distinct pages kept so far.
				if (! $seen{$item->{page}}) {
					$seen{$item->{page}}=1;
					$count++;
				}
			}
		}
	}
}
  • sub needsaggregate () {
  • return values %feeds if $config{rebuild};
  • return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
  • }
# Fetch and parse each given feed, handing every entry to add_page().
# Updates per-feed status fields (message, error, lasttry,
# lastupdate, newposts) as it goes; failures are recorded on the feed
# and skipped rather than aborting the whole run.
sub aggregate (@) {
	eval q{use XML::Feed};
	error($@) if $@;
	eval q{use URI::Fetch};
	error($@) if $@;

	foreach my $feed (@_) {
		$feed->{lasttry}=time;
		$feed->{newposts}=0;
		$feed->{message}=sprintf(gettext("last checked %s"),
			displaytime($feed->{lasttry}));
		$feed->{error}=0;

		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

		# No explicit feedurl configured: autodiscover one from
		# the feed's page url.
		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
			$feed->{feedurl}=pop @urls;
		}
		my $res=URI::Fetch->fetch($feed->{feedurl});
		if (! $res) {
			$feed->{message}=URI::Fetch->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}

		# lastupdate is only set if we were able to contact the server
		$feed->{lastupdate}=$feed->{lasttry};

		if ($res->status == URI::Fetch::URI_GONE()) {
			$feed->{message}=gettext("feed not found");
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		my $content=$res->content;
		my $f=eval{XML::Feed->parse(\$content)};
		if ($@) {
			# One common cause of XML::Feed crashing is a feed
			# that contains invalid UTF-8 sequences. Convert
			# feed to ascii to try to work around.
			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
			$f=eval {
				$content=Encode::decode_utf8($content, 0);
				XML::Feed->parse(\$content)
			};
		}
		if ($@) {
			# Another possibility is badly escaped entities.
			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
			$f=eval {
				$content=Encode::decode_utf8($content, 0);
				XML::Feed->parse(\$content)
			};
		}
		if ($@) {
			# Both workarounds failed; give up on this feed.
			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if (! $f) {
			$feed->{message}=XML::Feed->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}

		foreach my $entry ($f->entries) {
			# XML::Feed doesn't work around XML::Atom's bizarre
			# API, so we will. Real unicode strings? Yes please.
			# See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
			local $XML::Atom::ForceUnicode = 1;

			my $c=$entry->content;
			# atom feeds may have no content, only a summary
			if (! defined $c && ref $entry->summary) {
				$c=$entry->summary;
			}

			add_page(
				feed => $feed,
				copyright => $f->copyright,
				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
				link => $entry->link,
				content => (defined $c && defined $c->body) ? $c->body : "",
				# Fall back to a time-based guid if the entry has no id.
				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
				base => (defined $c && $c->can("base")) ? $c->base : undef,
			);
		}
	}
}
  • sub add_page (@) {
  • my %params=@_;
  • my $feed=$params{feed};
  • my $guid={};
  • my $mtime;
  • if (exists $guids{$params{guid}}) {
  • # updating an existing post
  • $guid=$guids{$params{guid}};
  • return if $guid->{expired};
  • }
  • else {
  • # new post
  • $guid->{guid}=$params{guid};
  • $guids{$params{guid}}=$guid;
  • $mtime=$params{ctime};
  • $feed->{numposts}++;
  • $feed->{newposts}++;
  • # assign it an unused page
  • my $page=titlepage($params{title});
  • # escape slashes and periods in title so it doesn't specify
  • # directory name or trigger ".." disallowing code.
  • $page=~s!([/.])!"__".ord($1)."__"!eg;
  • $page=$feed->{dir}."/".$page;
  • ($page)=$page=~/$config{wiki_file_regexp}/;
  • if (! defined $page || ! length $page) {
  • $page=$feed->{dir}."/item";
  • }
  • my $c="";
  • while (exists $IkiWiki::pagecase{lc $page.$c} ||
  • -e "$config{srcdir}/".htmlfn($page.$c)) {
  • $c++
  • }
  • # Make sure that the file name isn't too long.
  • # NB: This doesn't check for path length limits.
  • my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
  • if (defined $max && length(htmlfn($page)) >= $max) {
  • $c="";
  • $page=$feed->{dir}."/item";
  • while (exists $IkiWiki::pagecase{lc $page.$c} ||
  • -e "$config{srcdir}/".htmlfn($page.$c)) {
  • $c++
  • }
  • }
  • $guid->{page}=$page;
  • debug(sprintf(gettext("creating new page %s"), $page));
  • }
  • $guid->{feed}=$feed->{name};
  • # To write or not to write? Need to avoid writing unchanged pages
  • # to avoid unneccessary rebuilding. The mtime from rss cannot be
  • # trusted; let's use a digest.
  • eval q{use Digest::MD5 'md5_hex'};
  • error($@) if $@;
  • require Encode;
  • my $digest=md5_hex(Encode::encode_utf8($params{content}));
  • return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
  • $guid->{md5}=$digest;
  • # Create the page.
  • my $template=template($feed->{template}, blind_cache => 1);
  • $template->param(title => $params{title})
  • if defined $params{title} && length($params{title});
  • $template->param(content => wikiescape(htmlabs($params{content},
  • defined $params{base} ? $params{base} : $feed->{feedurl})));
  • $template->param(name => $feed->{name});
  • $template->param(url => $feed->{url});
  • $template->param(copyright => $params{copyright})
  • if defined $params{copyright} && length $params{copyright};
  • $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
  • if defined $params{link};
  • if (ref $feed->{tags}) {