n class="hl opt">, call => \&preprocess);
  • hook(type => "delete", id => "aggregate", call => \&delete);
  • hook(type => "savestate", id => "aggregate", call => \&savestate);
  • if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
  • hook(type => "cgi", id => "aggregate", call => \&cgi);
  • }
  • } # }}}
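# getopt hook: adds an --aggregate switch so feed aggregation can be
# requested from the command line when refreshing the wiki.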
sub getopt () { #{{{
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');
	GetOptions("aggregate" => \$config{aggregate});
} #}}}
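# checkconfig hook: when --aggregate was passed and this is not a
# post-commit run, aggregation is launched before the wiki is rendered.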
sub checkconfig () { #{{{
	if ($config{aggregate} && ! ($config{post_commit} &&
	                             IkiWiki::commit_hook_enabled())) {
		launchaggregation();
	}
} #}}}
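# cgi hook: services ?do=aggregate_webtrigger requests when the
# aggregate_webtrigger option is enabled, so aggregation can be triggered
# remotely over HTTP.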
sub cgi ($) { #{{{
	my $cgi=shift;

	if (defined $cgi->param('do') &&
	    $cgi->param("do") eq "aggregate_webtrigger") {
		$|=1;
		print "Content-Type: text/plain\n\n";
		$config{cgi}=0;
		$config{verbose}=1;
		$config{syslog}=0;
		print gettext("Aggregation triggered via web.")."\n\n";
		if (launchaggregation()) {
			IkiWiki::lockwiki();
			IkiWiki::loadindex();
			require IkiWiki::Render;
			IkiWiki::refresh();
			IkiWiki::saveindex();
		}
		else {
			print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
		}
		exit 0;
	}
} #}}}
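# Top-level driver: load state, decide which feeds are due, take the
# aggregation lock, and fork a child to do the fetching; the parent waits
# for it, and the affected source pages are rebuilt afterwards.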
sub launchaggregation () { #{{{
	# See if any feeds need aggregation.
	loadstate();
	my @feeds=needsaggregate();
	return unless @feeds;
	if (! lockaggregate()) {
		debug("an aggregation process is already running");
		return;
	}
	# force a later rebuild of source pages
	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
		foreach @feeds;

	# Fork a child process to handle the aggregation.
	# The parent process will then handle building the
	# result. This avoids messy code to clear state
	# accumulated while aggregating.
	defined(my $pid = fork) or error("Can't fork: $!");
	if (! $pid) {
		IkiWiki::loadindex();
		# Aggregation happens without the main wiki lock
		# being held. This allows editing pages etc while
		# aggregation is running.
		aggregate(@feeds);

		IkiWiki::lockwiki;
		# Merge changes, since aggregation state may have
		# changed on disk while the aggregation was happening.
		mergestate();
		expire();
		savestate();
		IkiWiki::unlockwiki;
		exit 0;
	}
	waitpid($pid,0);
	if ($?) {
		error "aggregation failed with code $?";
	}

	clearstate();
	unlockaggregate();

	return 1;
} #}}}
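# needsbuild hook: when a page containing aggregate directives is about to
# be rebuilt, mark its feeds as unseen; preprocess re-marks the ones that
# still exist and garbage_collect drops the rest.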
sub needsbuild (@) { #{{{
	my $needsbuild=shift;

	loadstate();

	foreach my $feed (values %feeds) {
		if (exists $pagesources{$feed->{sourcepage}} &&
		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
			# Mark all feeds originating on this page as
			# not yet seen; preprocess will unmark those that
			# still exist.
			markunseen($feed->{sourcepage});
		}
	}
} # }}}
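# preprocess hook: handles the [[aggregate ]] directive. Required
# parameters: name, url. Optional: feedurl, dir, updateinterval (minutes,
# default 15), expireage (days), expirecount, and tag (may be repeated).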
sub preprocess (@) { #{{{
	my %params=@_;

	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
		}
	}

	my $feed={};
	my $name=$params{name};
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
	$feed->{name}=$name;
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
	$dir=~s/^\/+//;
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{dir}=$dir;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	delete $feed->{unseen};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
	$feed->{tags}=[];
	while (@_) {
		my $key=shift;
		my $value=shift;
		if ($key eq 'tag') {
			push @{$feed->{tags}}, $value;
		}
	}

	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
	       ($feed->{error} ? "<em>" : "").$feed->{message}.
	       ($feed->{error} ? "</em>" : "").
	       " (".$feed->{numposts}." ".gettext("posts").
	       ($feed->{newposts} ? "; ".$feed->{newposts}.
	                            " ".gettext("new") : "").
	       ")";
} # }}}
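# delete hook: when source pages are removed from the wiki, mark the feeds
# they defined as unseen so their state is garbage collected on save.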
sub delete (@) { #{{{
	my @files=@_;

	# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
		markunseen($page);
	}
} #}}}
sub markunseen ($) { #{{{
	my $page=shift;

	foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{unseen}=1;
		}
	}
} #}}}
my $state_loaded=0;
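# State is stored in $config{wikistatedir}/aggregate, one feed or guid per
# line, as space-separated field=value pairs with entity-encoded values.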
sub loadstate () { #{{{
	return if $state_loaded;
	$state_loaded=1;
	if (-e "$config{wikistatedir}/aggregate") {
		open(IN, "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
		while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my $data={};
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data->{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					push @{$data->{tags}}, $val;
				}
				else {
					$data->{$field}=$val;
				}
			}

			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			}
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
			}
		}

		close IN;
	}
} #}}}
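# savestate hook: write state back out atomically by building
# aggregate.new and renaming it over the old file; the temporary file is
# removed if anything goes wrong.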
sub savestate () { #{{{
	return unless $state_loaded;
	garbage_collect();
	my $newfile="$config{wikistatedir}/aggregate.new";
	my $cleanup = sub { unlink($newfile) };
	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
	foreach my $data (values %feeds, values %guids) {
		my @line;
		foreach my $field (keys %$data) {
			if ($field eq "name" || $field eq "feed" ||
			    $field eq "guid" || $field eq "message") {
				push @line, "$field=".encode_entities($data->{$field}, " \t\n");
			}
			elsif ($field eq "tags") {
				push @line, "tag=$_" foreach @{$data->{tags}};
			}
			else {
				push @line, "$field=".$data->{$field};
			}
		}
		print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
	}
	close OUT || error("save $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/aggregate") ||
		error("rename $newfile: $!", $cleanup);
} #}}}
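# Drop feeds that no page still references, delete the pages of guids
# whose feed is gone, and remove the pages of guids flagged as expired.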
sub garbage_collect () { #{{{
	foreach my $name (keys %feeds) {
		# remove any feeds that were not seen while building the pages
		# that used to contain them
		if ($feeds{$name}->{unseen}) {
			delete $feeds{$name};
		}
	}

	foreach my $guid (values %guids) {
		# any guid whose feed is gone should be removed
		if (! exists $feeds{$guid->{feed}}) {
			unlink pagefile($guid->{page})
				if exists $guid->{page};
			delete $guids{$guid->{guid}};
		}
		# handle expired guids
		elsif ($guid->{expired} && exists $guid->{page}) {
			unlink pagefile($guid->{page});
			delete $guid->{page};
			delete $guid->{md5};
		}
	}
} #}}}
sub mergestate () { #{{{
	# Load the current state in from disk, and merge into it
	# values from the state in memory that might have changed
	# during aggregation.
	my %myfeeds=%feeds;
	my %myguids=%guids;
	clearstate();
	loadstate();

	# All that can change in feed state during aggregation is a few
	# fields.
	foreach my $name (keys %myfeeds) {
		if (exists $feeds{$name}) {
			foreach my $field (qw{message lastupdate numposts
			                      newposts error}) {
				$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
			}
		}
	}

	# New guids can be created during aggregation.
	# It's also possible that guids were removed from the on-disk state
	# while the aggregation was in process. That would only happen if
	# their feed was also removed, so any removed guids added back here
	# will be garbage collected later.
	foreach my $guid (keys %myguids) {
		if (! exists $guids{$guid}) {
			$guids{$guid}=$myguids{$guid};
		}
	}
} #}}}
sub clearstate () { #{{{
	%feeds=();
	%guids=();
	$state_loaded=0;
} #}}}
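# Apply per-feed expiry: items older than expireage days, or beyond the
# newest expirecount items, are flagged as expired; garbage_collect then
# removes their pages.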
sub expire () { #{{{
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		my $count=0;
		my %seen;
		foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
		                  values %guids) {
			if ($feed->{expireage}) {
				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, int($days_old)));
					$item->{expired}=1;
				}
			}
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				$item->{expired}=1;
			}
			else {
				if (! $seen{$item->{page}}) {
					$seen{$item->{page}}=1;
					$count++;
				}
			}
		}
	}
} #}}}
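# A feed is due for aggregation once updateinterval seconds have passed
# since its last update (or always, when the wiki is being rebuilt).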
sub needsaggregate () { #{{{
	return values %feeds if $config{rebuild};
	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
} #}}}
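# Fetch and parse each feed with URI::Fetch and XML::Feed, discovering the
# feed url from the configured page url when no feedurl was given.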
sub aggregate (@) { #{{{
	eval q{use XML::Feed};
	error($@) if $@;
	eval q{use URI::Fetch};
	error($@) if $@;

	foreach my $feed (@_) {
		$feed->{lastupdate}=time;
		$feed->{newposts}=0;
		$feed->{message}=sprintf(gettext("processed ok at %s"),
			displaytime($feed->{lastupdate}));
		$feed->{error}=0;

		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
			$feed->{feedurl}=pop @urls;
		}
		my $res=URI::Fetch->fetch($feed->{feedurl});
		if (! $res) {
			$feed->{message}=URI::Fetch->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if ($res->status == URI::Fetch::URI_GONE()) {