#!/usr/bin/perl
# Feed aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 3.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use URI;
use open qw{:utf8 :std};

my %feeds;    # feed name => feed state
my %guids;    # item guid => item state
sub import {
    hook(type => "getopt", id => "aggregate", call => \&getopt);
    hook(type => "getsetup", id => "aggregate", call => \&getsetup);
    hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
    hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
    hook(type => "preprocess", id => "aggregate", call => \&preprocess);
    hook(type => "delete", id => "aggregate", call => \&delete);
    hook(type => "savestate", id => "aggregate", call => \&savestate);
    hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
    if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
        hook(type => "cgi", id => "aggregate", call => \&cgi);
    }
}
sub getopt () {
    eval q{use Getopt::Long};
    error($@) if $@;
    Getopt::Long::Configure('pass_through');
    GetOptions(
        "aggregate" => \$config{aggregate},
        "aggregateinternal!" => \$config{aggregateinternal},
    );
}
sub getsetup () {
    return
        plugin => {
            safe => 1,
            rebuild => undef,
        },
        aggregateinternal => {
            type => "boolean",
            example => 1,
            description => "enable aggregation to internal pages?",
            safe => 0, # enabling needs manual transition
            rebuild => 0,
        },
        aggregate_webtrigger => {
            type => "boolean",
            example => 0,
            description => "allow aggregation to be triggered via the web?",
            safe => 1,
            rebuild => 0,
        },
}
sub checkconfig () {
    if (! defined $config{aggregateinternal}) {
        $config{aggregateinternal}=1;
    }

    if ($config{aggregate} && ! ($config{post_commit} &&
                                 IkiWiki::commit_hook_enabled())) {
        launchaggregation();
    }
}
sub cgi ($) {
    my $cgi=shift;

    if (defined $cgi->param('do') &&
        $cgi->param("do") eq "aggregate_webtrigger") {
        $|=1;
        print "Content-Type: text/plain\n\n";
        $config{cgi}=0;
        $config{post_commit}=0;
        $config{rebuild}=0;
        print gettext("Aggregation triggered via web.")."\n\n";
        if (launchaggregation()) {
            IkiWiki::lockwiki();
            IkiWiki::loadindex();
            require IkiWiki::Render;
            IkiWiki::refresh();
            IkiWiki::saveindex();
        }
        else {
            print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
        }
        exit 0;
    }
}
sub launchaggregation () {
    # See if any feeds need aggregation.
    loadstate();
    my @feeds=needsaggregate();
    return unless @feeds;
    if (! lockaggregate()) {
        debug("an aggregation process is already running");
        return;
    }
    # force a later rebuild of source pages
    $IkiWiki::forcerebuild{$_->{sourcepage}}=1
        foreach @feeds;

    # Fork a child process to handle the aggregation.
    # The parent process will then handle building the
    # result. This avoids messy code to clear state
    # accumulated while aggregating.
    defined(my $pid = fork) or error("Can't fork: $!");
    if (! $pid) {
        IkiWiki::loadindex();
        # Aggregation happens without the main wiki lock
        # being held. This allows editing pages etc while
        # aggregation is running.
        aggregate(@feeds);

        IkiWiki::lockwiki();
        # Merge changes, since aggregation state may have
        # changed on disk while the aggregation was happening.
        mergestate();
        expire();
        savestate();
        IkiWiki::unlockwiki();
        exit 0;
    }
    waitpid($pid, 0);
    if ($?) {
        error "aggregation failed with code $?";
    }

    clearstate();
    unlockaggregate();

    return 1;
}
# Pages with extension _aggregated have plain html markup; pass through.
sub htmlize (@) {
    my %params=@_;
    return $params{content};
}
# Used by ikiwiki-transition aggregateinternal.
sub migrate_to_internal {
    if (! lockaggregate()) {
        error("an aggregation process is currently running");
    }

    IkiWiki::lockwiki();
    loadstate();
    $config{verbose}=1;

    foreach my $data (values %guids) {
        next unless $data->{page};
        next if $data->{expired};

        $config{aggregateinternal} = 0;
        my $oldname = "$config{srcdir}/".htmlfn($data->{page});
        my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});

        $config{aggregateinternal} = 1;
        my $newname = "$config{srcdir}/".htmlfn($data->{page});

        debug "moving $oldname -> $newname";
        if (-e $newname) {
            if (-e $oldname) {
                error("$newname already exists");
            }
            else {
                debug("already renamed to $newname?");
            }
        }
        elsif (-e $oldname) {
            rename($oldname, $newname) || error("$!");
        }
        else {
            debug("$oldname not found");
        }

        if (-e $oldoutput) {
            require IkiWiki::Render;
            debug("removing output file $oldoutput");
            IkiWiki::prune($oldoutput);
        }
    }

    savestate();
    IkiWiki::unlockwiki();

    unlockaggregate();
}
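# Run via the transition tool shipped with ikiwiki, passing your setup file
# (filename hypothetical):
#   ikiwiki-transition aggregateinternal my.setup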
sub needsbuild (@) {
    my $needsbuild=shift;

    loadstate();

    foreach my $feed (values %feeds) {
        if (exists $pagesources{$feed->{sourcepage}} &&
            grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
            # Mark all feeds originating on this page as
            # not yet seen; preprocess will unmark those that
            # still exist.
            markunseen($feed->{sourcepage});
        }
    }

    return $needsbuild;
}
sub preprocess (@) {
    my %params=@_;

    foreach my $required (qw{name url}) {
        if (! exists $params{$required}) {
            error sprintf(gettext("missing %s parameter"), $required);
        }
    }

    my $feed={};
    my $name=$params{name};
    if (exists $feeds{$name}) {
        $feed=$feeds{$name};
    }
    else {
        $feeds{$name}=$feed;
    }
    $feed->{name}=$name;
    $feed->{sourcepage}=$params{page};
    $feed->{url}=$params{url};
    my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
    $dir=~s/^\/+//;
    ($dir)=$dir=~/$config{wiki_file_regexp}/;
    $feed->{dir}=$dir;
    $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
    $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
    $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
    $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
    if (exists $params{template}) {
        $params{template}=~s/[^-_a-zA-Z0-9]+//g;
    }
    else {
        $params{template}="aggregatepost";
    }
    $feed->{template}=$params{template}.".tmpl";
    delete $feed->{unseen};
    $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
    $feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
    $feed->{numposts}=0 unless defined $feed->{numposts};
    $feed->{newposts}=0 unless defined $feed->{newposts};
    $feed->{message}=gettext("new feed") unless defined $feed->{message};
    $feed->{error}=0 unless defined $feed->{error};
    $feed->{tags}=[];
    while (@_) {
        my $key=shift;
        my $value=shift;
        if ($key eq 'tag') {
            push @{$feed->{tags}}, $value;
        }
    }

    return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
        ($feed->{error} ? "<em>" : "").$feed->{message}.
        ($feed->{error} ? "</em>" : "").
        " (".$feed->{numposts}." ".gettext("posts").
        ($feed->{newposts} ? "; ".$feed->{newposts}." ".gettext("new") : "").
        ")";
}
sub delete (@) {
    my @files=@_;

    # Remove feed data for removed pages.
    foreach my $file (@files) {
        my $page=pagename($file);
        markunseen($page);
    }
}

sub markunseen ($) {
    my $page=shift;

    foreach my $id (keys %feeds) {
        if ($feeds{$id}->{sourcepage} eq $page) {
            $feeds{$id}->{unseen}=1;
        }
    }
}
my $state_loaded=0;

sub loadstate () {
    return if $state_loaded;
    $state_loaded=1;
    if (-e "$config{wikistatedir}/aggregate") {
        open(IN, "<", "$config{wikistatedir}/aggregate") ||
            die "$config{wikistatedir}/aggregate: $!";
        while (<IN>) {
            $_=IkiWiki::possibly_foolish_untaint($_);
            chomp;
            my $data={};
            foreach my $i (split(/ /, $_)) {
                my ($field, $val)=split(/=/, $i, 2);
                if ($field eq "name" || $field eq "feed" ||
                    $field eq "guid" || $field eq "message") {
                    $data->{$field}=decode_entities($val, " \t\n");
                }
                elsif ($field eq "tag") {
                    push @{$data->{tags}}, $val;
                }
                else {
                    $data->{$field}=$val;
                }
            }

            if (exists $data->{name}) {
                $feeds{$data->{name}}=$data;
            }
            elsif (exists $data->{guid}) {
                $guids{$data->{guid}}=$data;
            }
        }

        close IN;
    }
}
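# The state file holds one record per line: space-separated field=value
# pairs, with spaces, tabs, and newlines in values entity-encoded by
# savestate(). A hypothetical guid record:
#   guid=http://example.com/post/1 feed=example&#32;blog page=feeds/post_1 md5=d41d8cd98f00b204e9800998ecf8427e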
sub savestate () {
    return unless $state_loaded;
    garbage_collect();
    my $newfile="$config{wikistatedir}/aggregate.new";
    my $cleanup = sub { unlink($newfile) };
    open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
    foreach my $data (values %feeds, values %guids) {
        my @line;
        foreach my $field (keys %$data) {
            if ($field eq "name" || $field eq "feed" ||
                $field eq "guid" || $field eq "message") {
                push @line, "$field=".encode_entities($data->{$field}, " \t\n");
            }
            elsif ($field eq "tags") {
                push @line, "tag=$_" foreach @{$data->{tags}};
            }
            else {
                push @line, "$field=".$data->{$field}
                    if defined $data->{$field};
            }
        }
        # "or" rather than "||": "||" would bind to the string, which is
        # always true, so the error handler could never fire.
        print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
    }
    close OUT || error("save $newfile: $!", $cleanup);
    rename($newfile, "$config{wikistatedir}/aggregate") ||
        error("rename $newfile: $!", $cleanup);

    my $timestamp=undef;
    foreach my $feed (keys %feeds) {
        my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
        if (! defined $timestamp || $timestamp > $t) {
            $timestamp=$t;
        }
    }
    $newfile=~s/\.new$/time/;
    open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
    if (defined $timestamp) {
        print OUT $timestamp."\n";
    }
    close OUT || error("save $newfile: $!", $cleanup);
}
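# aggregate.time holds the earliest epoch time at which any feed is next
# due, letting an external scheduler (if one is used) skip --aggregate runs
# that would find nothing to do.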
sub garbage_collect () {
    foreach my $name (keys %feeds) {
        # remove any feeds that were not seen while building the pages
        # that used to contain them
        if ($feeds{$name}->{unseen}) {
            delete $feeds{$name};
        }
    }

    foreach my $guid (values %guids) {
        # any guid whose feed is gone should be removed
        if (! exists $feeds{$guid->{feed}}) {
            unlink "$config{srcdir}/".htmlfn($guid->{page})
                if exists $guid->{page};
            delete $guids{$guid->{guid}};
        }
        # handle expired guids
        elsif ($guid->{expired} && exists $guid->{page}) {
            unlink "$config{srcdir}/".htmlfn($guid->{page});
            delete $guid->{page};
            delete $guid->{md5};
        }
    }
}
sub mergestate () {
    # Load the current state in from disk, and merge into it
    # values from the state in memory that might have changed
    # during aggregation.
    my %myfeeds=%feeds;
    my %myguids=%guids;
    clearstate();
    loadstate();

    # All that can change in feed state during aggregation is a few
    # fields.
    foreach my $name (keys %myfeeds) {
        if (exists $feeds{$name}) {
            foreach my $field (qw{message lastupdate lasttry
                                  numposts newposts error}) {
                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
            }
        }
    }

    # New guids can be created during aggregation.
    # Guids have a few fields that may be updated during aggregation.
    # It's also possible that guids were removed from the on-disk state
    # while the aggregation was in process. That would only happen if
    # their feed was also removed, so any removed guids added back here
    # will be garbage collected later.
    foreach my $guid (keys %myguids) {
        if (! exists $guids{$guid}) {
            $guids{$guid}=$myguids{$guid};
        }
        else {
            foreach my $field (qw{md5}) {
                $guids{$guid}->{$field}=$myguids{$guid}->{$field};
            }
        }
    }
}

sub clearstate () {
    %feeds=();
    %guids=();
    $state_loaded=0;
}
sub expire () {
    foreach my $feed (values %feeds) {
        next unless $feed->{expireage} || $feed->{expirecount};
        my $count=0;
        my %seen;
        foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
                          grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
                          values %guids) {
            if ($feed->{expireage}) {
                my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
                if ($days_old > $feed->{expireage}) {
                    debug(sprintf(gettext("expiring %s (%s days old)"),
                        $item->{page}, int($days_old)));
                    $item->{expired}=1;
                }
            }
            elsif ($feed->{expirecount} &&
                   $count >= $feed->{expirecount}) {
                debug(sprintf(gettext("expiring %s"), $item->{page}));
                $item->{expired}=1;
            }
            else {
                if (! $seen{$item->{page}}) {
                    $seen{$item->{page}}=1;
                    $count++;
                }
            }
        }
    }
}
sub needsaggregate () {
    return values %feeds if $config{rebuild};
    return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
}
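# Example: updateinterval="60" in the directive was stored as 3600 seconds
# by preprocess(), so a feed last fetched at epoch t is due again once
# time - t >= 3600.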
sub aggregate (@) {
    eval q{use XML::Feed};
    error($@) if $@;
    eval q{use URI::Fetch};
    error($@) if $@;

    foreach my $feed (@_) {
        $feed->{lasttry}=time;
        $feed->{newposts}=0;
        $feed->{message}=sprintf(gettext("last checked %s"),
            displaytime($feed->{lasttry}));
        $feed->{error}=0;

        debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

        if (! length $feed->{feedurl}) {
            my @urls=XML::Feed->find_feeds($feed->{url});
            if (! @urls) {
                $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                $feed->{error}=1;
                debug($feed->{message});
                next;
            }
            $feed->{feedurl}=pop @urls;
        }
        my $res=URI::Fetch->fetch($feed->{feedurl});
        if (! $res) {
            $feed->{message}=URI::Fetch->errstr;
            $feed->{error}=1;
            debug($feed->{message});
            next;
        }

        # lastupdate is only set if we were able to contact the server
        $feed->{lastupdate}=$feed->{lasttry};

        if ($res->status == URI::Fetch::URI_GONE()) {
            $feed->{message}=gettext("feed not found");
            $feed->{error}=1;
            debug($feed->{message});
            next;
        }
        my $content=$res->content;
        my $f=eval{XML::Feed->parse(\$content)};
        if ($@) {
            # One common cause of XML::Feed crashing is a feed
            # that contains invalid UTF-8 sequences. Convert
            # feed to ascii to try to work around.
            $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
            $f=eval {
                $content=Encode::decode_utf8($content, 0);
                XML::Feed->parse(\$content)
            };
        }
        if ($@) {
            # Another possibility is badly escaped entities.
            $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
            $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
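            # e.g. a bare "&nbsp;" in the feed becomes "&amp;nbsp;";
            # already-escaped "&amp;" is skipped by the (?!amp) lookahead.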
            $f=eval {
                $content=Encode::decode_utf8($content, 0);
                XML::Feed->parse(\$content)
            };
        }
        if ($@) {
            $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
            $feed->{error}=1;
            debug($feed->{message});
            next;
        }
        if (! $f) {
            $feed->{message}=XML::Feed->errstr;
            $feed->{error}=1;
            debug($feed->{message});
            next;
        }

        foreach my $entry ($f->entries) {
            # XML::Feed doesn't work around XML::Atom's bizarre
            # API, so we will. Real unicode strings? Yes please.
            # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
            local $XML::Atom::ForceUnicode = 1;

            my $c=$entry->content;
            # atom feeds may have no content, only a summary
            if (! defined $c && ref $entry->summary) {
                $c=$entry->summary;
            }

            add_page(
                feed => $feed,
                copyright => $f->copyright,
                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                link => $entry->link,
                content => (defined $c && defined $c->body) ? $c->body : "",
                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                base => (defined $c && $c->can("base")) ? $c->base : undef,
            );
        }
    }
}
sub add_page (@) {
    my %params=@_;

    my $feed=$params{feed};
    my $guid={};
    my $mtime;
    if (exists $guids{$params{guid}}) {
        # updating an existing post
        $guid=$guids{$params{guid}};
        return if $guid->{expired};
    }
    else {
        # new post
        $guid->{guid}=$params{guid};
        $guids{$params{guid}}=$guid;
        $mtime=$params{ctime};
        $feed->{numposts}++;
        $feed->{newposts}++;

        # assign it an unused page
        my $page=titlepage($params{title});
        # escape slashes and periods in title so it doesn't specify
        # directory name or trigger ".." disallowing code.
        $page=~s!([/.])!"__".ord($1)."__"!eg;
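        # e.g. a title of "a/b.c" becomes "a__47__b__46__c"
        # (ord('/') == 47, ord('.') == 46), keeping it one page name.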
        $page=$feed->{dir}."/".$page;
        ($page)=$page=~/$config{wiki_file_regexp}/;
        if (! defined $page || ! length $page) {
            $page=$feed->{dir}."/item";
        }
        my $c="";
        while (exists $IkiWiki::pagecase{lc $page.$c} ||
               -e "$config{srcdir}/".htmlfn($page.$c)) {
            $c++
        }

        # Make sure that the file name isn't too long.
        # NB: This doesn't check for path length limits.
        my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
        if (defined $max && length(htmlfn($page)) >= $max) {
            $c="";
            $page=$feed->{dir}."/item";
            while (exists $IkiWiki::pagecase{lc $page.$c} ||
                   -e "$config{srcdir}/".htmlfn($page.$c)) {
                $c++
            }
        }

        $guid->{page}=$page;
        debug(sprintf(gettext("creating new page %s"), $page));
    }
    $guid->{feed}=$feed->{name};

    # To write or not to write? Need to avoid writing unchanged pages
    # to avoid unnecessary rebuilding. The mtime from rss cannot be
    # trusted; let's use a digest.
    eval q{use Digest::MD5 'md5_hex'};
    error($@) if $@;

    my $digest=md5_hex(Encode::encode_utf8($params{content}));
    return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
    $guid->{md5}=$digest;
    # Create the page.
    my $template=template($feed->{template}, blind_cache => 1);
    $template->param(title => $params{title})
        if defined $params{title} && length($params{title});
    $template->param(content => wikiescape(htmlabs($params{content},
        defined $params{base} ? $params{base} : $feed->{feedurl})));
    $template->param(name => $feed->{name});
    $template->param(url => $feed->{url});
    $template->param(copyright => $params{copyright})
        if defined $params{copyright} && length $params{copyright};
    $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
        if defined $params{link};
    if (ref $feed->{tags}) {
        $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
    }
    writefile(htmlfn($guid->{page}), $config{srcdir},
        $template->output);

    if (defined $mtime && $mtime <= time) {
        # Set the mtime, this lets the build process get the right
        # creation time on record for the new page.
        utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
        # Store it in pagectime for expiry code to use also.
        $IkiWiki::pagectime{$guid->{page}}=$mtime
            unless exists $IkiWiki::pagectime{$guid->{page}};
    }
    else {
        # Dummy value for expiry code.
        $IkiWiki::pagectime{$guid->{page}}=time
            unless exists $IkiWiki::pagectime{$guid->{page}};
    }
}
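# The template (aggregatepost.tmpl unless overridden) thus receives: title,
# content, name, url, and optionally copyright, permalink, and a tags loop.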
sub wikiescape ($) {
    # escape accidental wikilinks and preprocessor stuff
    return encode_entities(shift, '\[\]');
}
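# e.g. "[[link]]" becomes "&#91;&#91;link&#93;&#93;", so aggregated content
# cannot accidentally form wikilinks or preprocessor directives.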
sub urlabs ($$) {
    my $url=shift;
    my $urlbase=shift;

    URI->new_abs($url, $urlbase)->as_string;
}
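# e.g. urlabs("../images/x.png", "http://example.com/blog/feed") returns
# "http://example.com/images/x.png" (URLs hypothetical).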
sub htmlabs ($$) {
    # Convert links in html from relative to absolute.
    # Note that this is a heuristic, which is not specified by the rss
    # spec and may not be right for all feeds. Also, see Debian
    # bug #381359.
    my $html=shift;
    my $urlbase=shift;

    my $ret="";
    my $p = HTML::Parser->new(api_version => 3);
    $p->handler(default => sub { $ret.=join("", @_) }, "text");
    $p->handler(start => sub {
        my ($tagname, $pos, $text) = @_;
        if (ref $HTML::Tagset::linkElements{$tagname}) {
            while (4 <= @$pos) {
                # use attribute sets from right to left
                # to avoid invalidating the offsets
                # when replacing the values
                my ($k_offset, $k_len, $v_offset, $v_len) =
                    splice(@$pos, -4);
                my $attrname = lc(substr($text, $k_offset, $k_len));
                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
                next unless $v_offset; # 0 v_offset means no value
                my $v = substr($text, $v_offset, $v_len);
                $v =~ s/^([\'\"])(.*)\1$/$2/;
                my $new_v=urlabs($v, $urlbase);
                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                substr($text, $v_offset, $v_len) = qq("$new_v");
            }
        }
        $ret.=$text;
    }, "tagname, tokenpos, text");
    $p->parse($html);
    $p->eof;

    return $ret;
}
sub htmlfn ($) {
    return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
}
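# e.g. page "feeds/post_1" maps to source file "feeds/post_1._aggregated"
# when aggregateinternal is set, else "feeds/post_1.html" (assuming htmlext
# is "html").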
my $aggregatelock;

sub lockaggregate () {
    # Take an exclusive lock to prevent multiple concurrent aggregators.
    # Returns true if the lock was acquired.
    if (! -d $config{wikistatedir}) {
        mkdir($config{wikistatedir});
    }
    open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
        error("cannot open $config{wikistatedir}/aggregatelock: $!");
    if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
        close($aggregatelock) || error("failed closing aggregatelock: $!");
        return 0;
    }
    return 1;
}

sub unlockaggregate () {
    return close($aggregatelock) if $aggregatelock;
    return;
}

1