#!/usr/bin/perl
# Feed aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 2.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use URI;
use open qw{:utf8 :std};
use POSIX (); # pathconf() is used in add_page
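
# State: %feeds is keyed by feed name, %guids by entry guid. Both are
# loaded from and saved to $config{wikistatedir}/aggregate.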
my %feeds;
my %guids;

sub import { #{{{
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
        hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "savestate", id => "aggregate", call => \&savestate);
        if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
                hook(type => "cgi", id => "aggregate", call => \&cgi);
        }
} # }}}

sub getopt () { #{{{
        eval q{use Getopt::Long};
        error($@) if $@;
        Getopt::Long::Configure('pass_through');
        GetOptions("aggregate" => \$config{aggregate});
} #}}}

sub checkconfig () { #{{{
        if ($config{aggregate} && ! ($config{post_commit} &&
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
        }
} #}}}
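
# With the aggregate_webtrigger config option set, aggregation can be
# triggered over the web by fetching the CGI with do=aggregate_webtrigger,
# e.g. (illustrative URL):
#   wget -q -O - 'http://example.com/ikiwiki.cgi?do=aggregate_webtrigger'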
sub cgi ($) { #{{{
        my $cgi=shift;

        if (defined $cgi->param('do') &&
            $cgi->param("do") eq "aggregate_webtrigger") {
                $|=1;
                print "Content-Type: text/plain\n\n";
                $config{cgi}=0;
                $config{verbose}=1;
                $config{syslog}=0;
                print gettext("Aggregation triggered via web.")."\n\n";
                if (launchaggregation()) {
                        IkiWiki::lockwiki();
                        IkiWiki::loadindex();
                        require IkiWiki::Render;
                        IkiWiki::refresh();
                        IkiWiki::saveindex();
                }
                else {
                        print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
                }
                exit 0;
        }
} #}}}

sub launchaggregation () { #{{{
        # See if any feeds need aggregation.
        loadstate();
        my @feeds=needsaggregate();
        return unless @feeds;
        if (! lockaggregate()) {
                debug("an aggregation process is already running");
                return;
        }
        # force a later rebuild of source pages
        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
                foreach @feeds;

        # Fork a child process to handle the aggregation.
        # The parent process will then handle building the
        # result. This avoids messy code to clear state
        # accumulated while aggregating.
        defined(my $pid = fork) or error("Can't fork: $!");
        if (! $pid) {
                IkiWiki::loadindex();
                # Aggregation happens without the main wiki lock
                # being held. This allows editing pages etc while
                # aggregation is running.
                aggregate(@feeds);

                IkiWiki::lockwiki;
                # Merge changes, since aggregation state may have
                # changed on disk while the aggregation was happening.
                mergestate();
                expire();
                savestate();
                IkiWiki::unlockwiki;
                exit 0;
        }
        waitpid($pid,0);
        if ($?) {
                error "aggregation failed with code $?";
        }

        clearstate();
        unlockaggregate();

        return 1;
} #}}}

sub needsbuild (@) { #{{{
        my $needsbuild=shift;

        loadstate();

        foreach my $feed (values %feeds) {
                if (exists $pagesources{$feed->{sourcepage}} &&
                    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
                        # Mark all feeds originating on this page as
                        # not yet seen; preprocess will unmark those that
                        # still exist.
                        markunseen($feed->{sourcepage});
                }
        }
} # }}}
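
# An aggregate directive, as it might appear on a wiki page (illustrative):
#   [[aggregate name="example blog" url="http://example.com/"
#               feedurl="http://example.com/index.rss"
#               updateinterval="60" expireage="30" tag="example"]]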
sub preprocess (@) { #{{{
        my %params=@_;

        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
                        return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
                }
        }

        my $feed={};
        my $name=$params{name};
        if (exists $feeds{$name}) {
                $feed=$feeds{$name};
        }
        else {
                $feeds{$name}=$feed;
        }
        $feed->{name}=$name;
        $feed->{sourcepage}=$params{page};
        $feed->{url}=$params{url};
        my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
        $dir=~s/^\/+//;
        ($dir)=$dir=~/$config{wiki_file_regexp}/;
        $feed->{dir}=$dir;
        $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
        $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
        $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
        $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
        if (exists $params{template}) {
                $params{template}=~s/[^-_a-zA-Z0-9]+//g;
        }
        else {
                $params{template} = "aggregatepost";
        }
        $feed->{template}=$params{template} . ".tmpl";
        delete $feed->{unseen};
        $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
        $feed->{numposts}=0 unless defined $feed->{numposts};
        $feed->{newposts}=0 unless defined $feed->{newposts};
        $feed->{message}=gettext("new feed") unless defined $feed->{message};
        $feed->{error}=0 unless defined $feed->{error};
        $feed->{tags}=[];
        while (@_) {
                my $key=shift;
                my $value=shift;
                if ($key eq 'tag') {
                        push @{$feed->{tags}}, $value;
                }
        }

        return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
               ($feed->{error} ? "<em>" : "").$feed->{message}.
               ($feed->{error} ? "</em>" : "").
               " (".$feed->{numposts}." ".gettext("posts").
               ($feed->{newposts} ? "; ".$feed->{newposts}.
                                    " ".gettext("new") : "").
               ")";
} # }}}

sub delete (@) { #{{{
        my @files=@_;

        # Remove feed data for removed pages.
        foreach my $file (@files) {
                my $page=pagename($file);
                markunseen($page);
        }
} #}}}

sub markunseen ($) { #{{{
        my $page=shift;

        foreach my $id (keys %feeds) {
                if ($feeds{$id}->{sourcepage} eq $page) {
                        $feeds{$id}->{unseen}=1;
                }
        }
} #}}}

my $state_loaded=0;
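
# On-disk state format: one line per feed or guid, made up of
# space-separated field=value pairs. Free-form fields (name, feed,
# guid, message) have spaces, tabs, and newlines entity-encoded.
# A line with a "name" field describes a feed; a line with a "guid"
# field describes a seen entry.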
sub loadstate () { #{{{
        return if $state_loaded;
        $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
                open(IN, "$config{wikistatedir}/aggregate") ||
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
                        my $data={};
                        foreach my $i (split(/ /, $_)) {
                                my ($field, $val)=split(/=/, $i, 2);
                                if ($field eq "name" || $field eq "feed" ||
                                    $field eq "guid" || $field eq "message") {
                                        $data->{$field}=decode_entities($val, " \t\n");
                                }
                                elsif ($field eq "tag") {
                                        push @{$data->{tags}}, $val;
                                }
                                else {
                                        $data->{$field}=$val;
                                }
                        }

                        if (exists $data->{name}) {
                                $feeds{$data->{name}}=$data;
                        }
                        elsif (exists $data->{guid}) {
                                $guids{$data->{guid}}=$data;
                        }
                }

                close IN;
        }
} #}}}
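
# State is written to a temporary file which is renamed into place,
# so an interrupted save cannot corrupt the existing state file.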
sub savestate () { #{{{
        return unless $state_loaded;
        garbage_collect();
        my $newfile="$config{wikistatedir}/aggregate.new";
        my $cleanup = sub { unlink($newfile) };
        open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                my @line;
                foreach my $field (keys %$data) {
                        if ($field eq "name" || $field eq "feed" ||
                            $field eq "guid" || $field eq "message") {
                                push @line, "$field=".encode_entities($data->{$field}, " \t\n");
                        }
                        elsif ($field eq "tags") {
                                push @line, "tag=$_" foreach @{$data->{tags}};
                        }
                        else {
                                push @line, "$field=".$data->{$field};
                        }
                }
                # Low-precedence "or" is needed here; with "||" the error
                # check would be swallowed into print's argument list.
                print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
        }
        close OUT || error("save $newfile: $!", $cleanup);
        rename($newfile, "$config{wikistatedir}/aggregate") ||
                error("rename $newfile: $!", $cleanup);
} #}}}

sub garbage_collect () { #{{{
        foreach my $name (keys %feeds) {
                # remove any feeds that were not seen while building the pages
                # that used to contain them
                if ($feeds{$name}->{unseen}) {
                        delete $feeds{$name};
                }
        }

        foreach my $guid (values %guids) {
                # any guid whose feed is gone should be removed
                if (! exists $feeds{$guid->{feed}}) {
                        unlink pagefile($guid->{page})
                                if exists $guid->{page};
                        delete $guids{$guid->{guid}};
                }
                # handle expired guids
                elsif ($guid->{expired} && exists $guid->{page}) {
                        unlink pagefile($guid->{page});
                        delete $guid->{page};
                        delete $guid->{md5};
                }
        }
} #}}}

sub mergestate () { #{{{
        # Load the current state in from disk, and merge into it
        # values from the state in memory that might have changed
        # during aggregation.
        my %myfeeds=%feeds;
        my %myguids=%guids;
        clearstate();
        loadstate();

        # All that can change in feed state during aggregation is a few
        # fields.
        foreach my $name (keys %myfeeds) {
                if (exists $feeds{$name}) {
                        foreach my $field (qw{message lastupdate numposts
                                              newposts error}) {
                                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
                        }
                }
        }

        # New guids can be created during aggregation.
        # It's also possible that guids were removed from the on-disk state
        # while the aggregation was in process. That would only happen if
        # their feed was also removed, so any removed guids added back here
        # will be garbage collected later.
        foreach my $guid (keys %myguids) {
                if (! exists $guids{$guid}) {
                        $guids{$guid}=$myguids{$guid};
                }
        }
} #}}}

sub clearstate () { #{{{
        %feeds=();
        %guids=();
        $state_loaded=0;
} #}}}
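
# Expire items per feed, either by age (expireage, in days) or by
# keeping only the newest expirecount items; items are examined
# newest-first by recorded creation time.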
sub expire () { #{{{
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
                my $count=0;
                my %seen;
                foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
                                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
                                  values %guids) {
                        if ($feed->{expireage}) {
                                my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
                                if ($days_old > $feed->{expireage}) {
                                        debug(sprintf(gettext("expiring %s (%s days old)"),
                                                $item->{page}, int($days_old)));
                                        $item->{expired}=1;
                                }
                        }
                        elsif ($feed->{expirecount} &&
                               $count >= $feed->{expirecount}) {
                                debug(sprintf(gettext("expiring %s"), $item->{page}));
                                $item->{expired}=1;
                        }
                        else {
                                if (! $seen{$item->{page}}) {
                                        $seen{$item->{page}}=1;
                                        $count++;
                                }
                        }
                }
        }
} #}}}
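
# A feed needs aggregation once updateinterval seconds have passed
# since its last update (or always, when rebuilding the wiki).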
sub needsaggregate () { #{{{
        return values %feeds if $config{rebuild};
        return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
} #}}}
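
# Fetch and parse each feed. If no feedurl was specified, feed
# autodiscovery is attempted on the page url; parse failures are
# retried with workarounds for invalid UTF-8 and badly escaped
# entities before giving up.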
sub aggregate (@) { #{{{
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
        error($@) if $@;

        foreach my $feed (@_) {
                $feed->{lastupdate}=time;
                $feed->{newposts}=0;
                $feed->{message}=sprintf(gettext("processed ok at %s"),
                        displaytime($feed->{lastupdate}));
                $feed->{error}=0;

                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
                                $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
                my $res=URI::Fetch->fetch($feed->{feedurl});
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if ($res->status == URI::Fetch::URI_GONE()) {
                        $feed->{message}=gettext("feed not found");
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                my $content=$res->content;
                my $f=eval{XML::Feed->parse(\$content)};
                if ($@) {
                        # One common cause of XML::Feed crashing is a feed
                        # that contains invalid UTF-8 sequences. Convert
                        # feed to ascii to try to work around.
                        $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
                        $content=Encode::decode_utf8($content, 0);
                        $f=eval{XML::Feed->parse(\$content)};
                }
                if ($@) {
                        # Another possibility is badly escaped entities.
                        $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
                        $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
                        $content=Encode::decode_utf8($content, 0);
                        $f=eval{XML::Feed->parse(\$content)};
                }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if (! $f) {
                        $feed->{message}=XML::Feed->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }

                foreach my $entry ($f->entries) {
                        add_page(
                                feed => $feed,
                                copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
                                content => defined $entry->content->body ? $entry->content->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                        );
                }
        }
} #}}}

sub add_page (@) { #{{{
        my %params=@_;

        my $feed=$params{feed};
        my $guid={};
        my $mtime;
        if (exists $guids{$params{guid}}) {
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
        }
        else {
                # new post
                $guid->{guid}=$params{guid};
                $guids{$params{guid}}=$guid;
                $mtime=$params{ctime};
                $feed->{numposts}++;
                $feed->{newposts}++;

                # assign it an unused page
                my $page=IkiWiki::titlepage($params{title});
                # escape slashes and periods in title so it doesn't specify
                # directory name or trigger ".." disallowing code.
                $page=~s!([/.])!"__".ord($1)."__"!eg;
                $page=$feed->{dir}."/".$page;
                ($page)=$page=~/$config{wiki_file_regexp}/;
                if (! defined $page || ! length $page) {
                        $page=$feed->{dir}."/item";
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
                       -e pagefile($page.$c)) {
                        $c++;
                }

                # Make sure that the file name isn't too long.
                # NB: This doesn't check for path length limits.
                my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
                if (defined $max && length(htmlfn($page)) >= $max) {
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
                               -e pagefile($page.$c)) {
                                $c++;
                        }
                }

                $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
        $guid->{feed}=$feed->{name};

        # To write or not to write? Need to avoid writing unchanged pages
        # to avoid unnecessary rebuilding. The mtime from rss cannot be
        # trusted; let's use a digest.
        eval q{use Digest::MD5 'md5_hex'};
        error($@) if $@;
        require Encode;
        my $digest=md5_hex(Encode::encode_utf8($params{content}));
        return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
        $guid->{md5}=$digest;

        # Create the page.
        my $template=template($feed->{template}, blind_cache => 1);
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
        $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
        $template->param(copyright => $params{copyright})
                if defined $params{copyright} && length $params{copyright};
        $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
        writefile(htmlfn($guid->{page}), $config{srcdir},
                $template->output);

        # Set the mtime, this lets the build process get the right creation
        # time on record for the new page.
        utime $mtime, $mtime, pagefile($guid->{page})
                if defined $mtime && $mtime <= time;
} #}}}

sub htmlescape ($) { #{{{
        # escape accidental wikilinks and preprocessor stuff
        my $html=shift;
        $html=~s/(?<!\\)\[\[/\\\[\[/g;
        return $html;
} #}}}
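
# Resolve a possibly-relative URL against the feed's base URL.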
sub urlabs ($$) { #{{{
        my $url=shift;
        my $urlbase=shift;

        URI->new_abs($url, $urlbase)->as_string;
} #}}}

sub htmlabs ($$) { #{{{
        # Convert links in html from relative to absolute.
        # Note that this is a heuristic, which is not specified by the rss
        # spec and may not be right for all feeds. Also, see Debian
        # bug #381359.
        my $html=shift;
        my $urlbase=shift;

        my $ret="";
        my $p = HTML::Parser->new(api_version => 3);
        $p->handler(default => sub { $ret.=join("", @_) }, "text");
        $p->handler(start => sub {
                my ($tagname, $pos, $text) = @_;
                if (ref $HTML::Tagset::linkElements{$tagname}) {
                        while (4 <= @$pos) {
                                # use attribute sets from right to left
                                # to avoid invalidating the offsets
                                # when replacing the values
                                my($k_offset, $k_len, $v_offset, $v_len) =
                                        splice(@$pos, -4);
                                my $attrname = lc(substr($text, $k_offset, $k_len));
                                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
                                next unless $v_offset; # 0 v_offset means no value
                                my $v = substr($text, $v_offset, $v_len);
                                $v =~ s/^([\'\"])(.*)\1$/$2/;
                                my $new_v=urlabs($v, $urlbase);
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }
                }
                $ret.=$text;
        }, "tagname, tokenpos, text");
        $p->parse($html);
        $p->eof;

        return $ret;
} #}}}

sub pagefile ($) { #{{{
        my $page=shift;

        return "$config{srcdir}/".htmlfn($page);
} #}}}

sub htmlfn ($) { #{{{
        return shift().".".$config{htmlext};
} #}}}

my $aggregatelock;

sub lockaggregate () { #{{{
        # Take an exclusive lock to prevent multiple concurrent aggregators.
        # Returns true if the lock was acquired.
        if (! -d $config{wikistatedir}) {
                mkdir($config{wikistatedir});
        }
        open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
                error("cannot open $config{wikistatedir}/aggregatelock: $!");
        if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
                close($aggregatelock) || error("failed closing aggregatelock: $!");
                return 0;
        }
        return 1;
} #}}}

sub unlockaggregate () { #{{{
        return close($aggregatelock) if $aggregatelock;
        return;
} #}}}

# The module must return a true value when loaded.
1