path: root/IkiWiki/Plugin/aggregate.pm
#!/usr/bin/perl
# Blog aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 2.00;
use HTML::Entities;
use HTML::Parser;
use HTML::Tagset;
use URI;
use open qw{:utf8 :std};

my %feeds;
my %guids;

sub import { #{{{
	hook(type => "getopt", id => "aggregate", call => \&getopt);
	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
	hook(type => "delete", id => "aggregate", call => \&delete);
	hook(type => "savestate", id => "aggregate", call => \&savestate);
} # }}}

sub getopt () { #{{{
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');
	GetOptions("aggregate" => \$config{aggregate});
} #}}}

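# Aggregation is normally triggered by passing --aggregate (together with
# --refresh) to ikiwiki, for example from a cron job; the switch registered
# above sets $config{aggregate}, which checkconfig() below acts on.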
sub checkconfig () { #{{{
	if ($config{aggregate} && ! ($config{post_commit} &&
			IkiWiki::commit_hook_enabled())) {
		# See if any feeds need aggregation.
		loadstate();
		my @feeds=needsaggregate();
		return unless @feeds;
		if (! lockaggregate()) {
			debug("an aggregation process is already running");
			return;
		}
		# force a later rebuild of source pages
		$IkiWiki::forcerebuild{$_->{sourcepage}}=1
			foreach @feeds;

		# Fork a child process to handle the aggregation.
		# The parent process will then handle building the
		# result. This avoids messy code to clear state
		# accumulated while aggregating.
		defined(my $pid = fork) or error("Can't fork: $!");
		if (! $pid) {
			IkiWiki::loadindex();
			# Aggregation happens without the main wiki lock
			# being held. This allows editing pages etc while
			# aggregation is running.
			aggregate(@feeds);
			IkiWiki::lockwiki;
			# Merge changes, since aggregation state may have
			# changed on disk while the aggregation was happening.
			mergestate();
			expire();
			savestate();
			IkiWiki::unlockwiki;
			exit 0;
		}
		waitpid($pid,0);
		if ($?) {
			error "aggregation failed with code $?";
		}

		clearstate();
		unlockaggregate();
	}
} #}}}

sub needsbuild (@) { #{{{
	my $needsbuild=shift;

	loadstate();

	foreach my $feed (values %feeds) {
		if (exists $pagesources{$feed->{sourcepage}} &&
		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
			# Mark all feeds originating on this page as
			# not yet seen; preprocess will unmark those that
			# still exist.
			markunseen($feed->{sourcepage});
		}
	}
} # }}}

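# Illustrative directive usage on a wiki page (values are made up, not from
# the original source); name and url are required, everything else falls
# back to the defaults assigned below:
#   [[aggregate name="example blog" url="http://example.com/"
#     feedurl="http://example.com/index.rss" updateinterval=60
#     expireage=30 tag=example]]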
sub preprocess (@) { #{{{
	my %params=@_;

	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
		}
	}

	my $feed={};
	my $name=$params{name};
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
	$feed->{name}=$name;
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
	$dir=~s/^\/+//;
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{dir}=$dir;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	delete $feed->{unseen};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
	$feed->{tags}=[];
	while (@_) {
		my $key=shift;
		my $value=shift;
		if ($key eq 'tag') {
			push @{$feed->{tags}}, $value;
		}
	}

	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
		($feed->{error} ? "<em>" : "").$feed->{message}.
		($feed->{error} ? "</em>" : "").
		" (".$feed->{numposts}." ".gettext("posts").
		($feed->{newposts} ? "; ".$feed->{newposts}.
			" ".gettext("new") : "").
		")";
} # }}}

sub delete (@) { #{{{
	my @files=@_;

	# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
		markunseen($page);
	}
} #}}}

sub markunseen ($) { #{{{
	my $page=shift;

	foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{unseen}=1;
		}
	}
} #}}}

my $state_loaded=0;

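# On-disk state format (see loadstate/savestate below): one feed or guid
# record per line, written as space-separated field=value pairs. The name,
# feed, guid and message values are entity-encoded so they may contain
# whitespace, and each tag gets its own tag=... pair. An illustrative (not
# verbatim) record:
#   guid=http://example.com/post feed=example&#32;blog page=example/post md5=0123abcd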
sub loadstate () { #{{{
	return if $state_loaded;
	$state_loaded=1;
	if (-e "$config{wikistatedir}/aggregate") {
		open(IN, "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
		while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my $data={};
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data->{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					push @{$data->{tags}}, $val;
				}
				else {
					$data->{$field}=$val;
				}
			}

			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			}
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
			}
		}

		close IN;
	}
} #}}}

sub savestate () { #{{{
	return unless $state_loaded;
	garbage_collect();
	eval q{use HTML::Entities};
	error($@) if $@;
	my $newfile="$config{wikistatedir}/aggregate.new";
	my $cleanup = sub { unlink($newfile) };
	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
	foreach my $data (values %feeds, values %guids) {
		my @line;
		foreach my $field (keys %$data) {
			if ($field eq "name" || $field eq "feed" ||
			    $field eq "guid" || $field eq "message") {
				push @line, "$field=".encode_entities($data->{$field}, " \t\n");
			}
			elsif ($field eq "tags") {
				push @line, "tag=$_" foreach @{$data->{tags}};
			}
			else {
				push @line, "$field=".$data->{$field};
			}
		}
		# use low-precedence "or" so the failure check applies to print,
		# not to the argument list
		print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
	}
	close OUT || error("save $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/aggregate") ||
		error("rename $newfile: $!", $cleanup);
} #}}}

sub garbage_collect () { #{{{
	foreach my $name (keys %feeds) {
		# remove any feeds that were not seen while building the pages
		# that used to contain them
		if ($feeds{$name}->{unseen}) {
			delete $feeds{$name};
		}
	}

	foreach my $guid (values %guids) {
		# any guid whose feed is gone should be removed
		if (! exists $feeds{$guid->{feed}}) {
			unlink pagefile($guid->{page})
				if exists $guid->{page};
			delete $guids{$guid->{guid}};
		}
		# handle expired guids
		elsif ($guid->{expired} && exists $guid->{page}) {
			unlink pagefile($guid->{page});
			delete $guid->{page};
			delete $guid->{md5};
		}
	}
} #}}}

sub mergestate () { #{{{
	# Load the current state in from disk, and merge into it
	# values from the state in memory that might have changed
	# during aggregation.
	my %myfeeds=%feeds;
	my %myguids=%guids;
	clearstate();
	loadstate();

	# All that can change in feed state during aggregation is a few
	# fields.
	foreach my $name (keys %myfeeds) {
		if (exists $feeds{$name}) {
			foreach my $field (qw{message lastupdate numposts
			                      newposts error}) {
				$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
			}
		}
	}

	# New guids can be created during aggregation.
	# It's also possible that guids were removed from the on-disk state
	# while the aggregation was in process. That would only happen if
	# their feed was also removed, so any removed guids added back here
	# will be garbage collected later.
	foreach my $guid (keys %myguids) {
		if (! exists $guids{$guid}) {
			$guids{$guid}=$myguids{$guid};
		}
	}
} #}}}

sub clearstate () { #{{{
	%feeds=();
	%guids=();
	$state_loaded=0;
} #}}}

sub expire () { #{{{
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		my $count=0;
		my %seen;
		foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
		                  values %guids) {
			if ($feed->{expireage}) {
				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, int($days_old)));
					$item->{expired}=1;
				}
			}
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				$item->{expired}=1;
			}
			else {
				if (! $seen{$item->{page}}) {
					$seen{$item->{page}}=1;
					$count++;
				}
			}
		}
	}
} #}}}

sub needsaggregate () { #{{{
	return values %feeds if $config{rebuild};
	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
} #}}}

sub aggregate (@) { #{{{
	eval q{use XML::Feed};
	error($@) if $@;
	eval q{use URI::Fetch};
	error($@) if $@;
	eval q{use HTML::Entities};
	error($@) if $@;

	foreach my $feed (@_) {
		$feed->{lastupdate}=time;
		$feed->{newposts}=0;
		$feed->{message}=sprintf(gettext("processed ok at %s"),
			displaytime($feed->{lastupdate}));
		$feed->{error}=0;

		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
			$feed->{feedurl}=pop @urls;
		}
		my $res=URI::Fetch->fetch($feed->{feedurl});
		if (! $res) {
			$feed->{message}=URI::Fetch->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if ($res->status == URI::Fetch::URI_GONE()) {
			$feed->{message}=gettext("feed not found");
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		my $content=$res->content;
		my $f=eval{XML::Feed->parse(\$content)};
		if ($@) {
			# One common cause of XML::Feed crashing is a feed
			# that contains invalid UTF-8 sequences. Convert
			# feed to ascii to try to work around.
			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
			$content=Encode::decode_utf8($content);
			$f=eval{XML::Feed->parse(\$content)};
		}
		if ($@) {
			# Another possibility is badly escaped entities.
			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
			$content=Encode::decode_utf8($content);
			$f=eval{XML::Feed->parse(\$content)};
		}
		if ($@) {
			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if (! $f) {
			$feed->{message}=XML::Feed->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}

		foreach my $entry ($f->entries) {
			add_page(
				feed => $feed,
				copyright => $f->copyright,
				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
				link => $entry->link,
				content => defined $entry->content->body ? $entry->content->body : "",
				# $feed is a plain hash reference, so use a hash
				# lookup here rather than a method call
				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
			);
		}
	}
} #}}}

sub add_page (@) { #{{{
	my %params=@_;

	my $feed=$params{feed};
	my $guid={};
	my $mtime;

	if (exists $guids{$params{guid}}) {
		# updating an existing post
		$guid=$guids{$params{guid}};
		return if $guid->{expired};
	}
	else {
		# new post
		$guid->{guid}=$params{guid};
		$guids{$params{guid}}=$guid;
		$mtime=$params{ctime};
		$feed->{numposts}++;
		$feed->{newposts}++;

		# assign it an unused page
		my $page=IkiWiki::titlepage($params{title});
		# escape slashes and periods in title so it doesn't specify
		# directory name or trigger ".." disallowing code.
		$page=~s!([/.])!"__".ord($1)."__"!eg;
		$page=$feed->{dir}."/".$page;
		($page)=$page=~/$config{wiki_file_regexp}/;
		if (! defined $page || ! length $page) {
			$page=$feed->{dir}."/item";
		}
		my $c="";
		while (exists $IkiWiki::pagecase{lc $page.$c} ||
		       -e pagefile($page.$c)) {
			$c++
		}

		# Make sure that the file name isn't too long.
		# NB: This doesn't check for path length limits.
		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
		if (defined $max && length(htmlfn($page)) >= $max) {
			$c="";
			$page=$feed->{dir}."/item";
			while (exists $IkiWiki::pagecase{lc $page.$c} ||
			       -e pagefile($page.$c)) {
				$c++
			}
		}

		$guid->{page}=$page;
		debug(sprintf(gettext("creating new page %s"), $page));
	}
	$guid->{feed}=$feed->{name};

	# To write or not to write? Need to avoid writing unchanged pages
	# to avoid unnecessary rebuilding. The mtime from rss cannot be
	# trusted; let's use a digest.
	eval q{use Digest::MD5 'md5_hex'};
	error($@) if $@;
	require Encode;
	my $digest=md5_hex(Encode::encode_utf8($params{content}));
	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
	$guid->{md5}=$digest;

	# Create the page.
	my $template=template("aggregatepost.tmpl", blind_cache => 1);
	$template->param(title => $params{title})
		if defined $params{title} && length($params{title});
	$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
	$template->param(name => $feed->{name});
	$template->param(url => $feed->{url});
	$template->param(copyright => $params{copyright})
		if defined $params{copyright} && length $params{copyright};
	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
		if defined $params{link};
	if (ref $feed->{tags}) {
		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
	}
	writefile(htmlfn($guid->{page}), $config{srcdir},
		$template->output);

	# Set the mtime, this lets the build process get the right creation
	# time on record for the new page.
	utime $mtime, $mtime, pagefile($guid->{page})
		if defined $mtime && $mtime <= time;
} #}}}

sub htmlescape ($) { #{{{
	# escape accidental wikilinks and preprocessor stuff
	my $html=shift;
	$html=~s/(?<!\\)\[\[/\\\[\[/g;
	return $html;
} #}}}

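# Example of urlabs() below (standard URI resolution, shown for illustration):
#   urlabs("../img.png", "http://example.com/blog/feed")
#   returns "http://example.com/img.png"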
sub urlabs ($$) { #{{{
	my $url=shift;
	my $urlbase=shift;

	URI->new_abs($url, $urlbase)->as_string;
} #}}}

sub htmlabs ($$) { #{{{
	# Convert links in html from relative to absolute.
	# Note that this is a heuristic, which is not specified by the rss
	# spec and may not be right for all feeds. Also, see Debian
	# bug #381359.
	my $html=shift;
	my $urlbase=shift;

	my $ret="";

	my $p = HTML::Parser->new(api_version => 3);
	$p->handler(default => sub { $ret.=join("", @_) }, "text");
	$p->handler(start => sub {
		my ($tagname, $pos, $text) = @_;
		if (ref $HTML::Tagset::linkElements{$tagname}) {
			while (4 <= @$pos) {
				# use attribute sets from right to left
				# to avoid invalidating the offsets
				# when replacing the values
				my($k_offset, $k_len, $v_offset, $v_len) =
					splice(@$pos, -4);
				my $attrname = lc(substr($text, $k_offset, $k_len));
				next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
				next unless $v_offset; # 0 v_offset means no value
				my $v = substr($text, $v_offset, $v_len);
				$v =~ s/^([\'\"])(.*)\1$/$2/;
				my $new_v=urlabs($v, $urlbase);
				$new_v =~ s/\"/&quot;/g; # since we quote with ""
				substr($text, $v_offset, $v_len) = qq("$new_v");
			}
		}
		$ret.=$text;
	}, "tagname, tokenpos, text");
	$p->parse($html);
	$p->eof;

	return $ret;
} #}}}

sub pagefile ($) { #{{{
	my $page=shift;

	return "$config{srcdir}/".htmlfn($page);
} #}}}

sub htmlfn ($) { #{{{
	return shift().".".$config{htmlext};
} #}}}

my $aggregatelock;

sub lockaggregate () { #{{{
	# Take an exclusive lock to prevent multiple concurrent aggregators.
	# Returns true if the lock was acquired.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
		error("cannot open $config{wikistatedir}/aggregatelock: $!");
	if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
		close($aggregatelock) || error("failed closing aggregatelock: $!");
		return 0;
	}
	return 1;
} #}}}

sub unlockaggregate () { #{{{
	return close($aggregatelock) if $aggregatelock;
	return;
} #}}}

1