#!/usr/bin/perl
# Feed aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 3.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use open qw{:utf8 :std};

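# In-memory state: %feeds maps a feed name to its metadata hash, and
# %guids maps a feed item's guid to the data for the page created from it.
# Both are persisted in $config{wikistatedir}/aggregate by savestate().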
my %feeds;
my %guids;

sub import {
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "getsetup", id => "aggregate", call => \&getsetup);
        hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
                last => 1);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
        hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "savestate", id => "aggregate", call => \&savestate);
        hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
        if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
                hook(type => "cgi", id => "aggregate", call => \&cgi);
        }
}

sub getopt () {
        eval q{use Getopt::Long};
        error($@) if $@;
        Getopt::Long::Configure('pass_through');
        GetOptions(
                "aggregate" => \$config{aggregate},
                "aggregateinternal!" => \$config{aggregateinternal},
        );
}

sub getsetup () {
        return
                plugin => {
                        safe => 1,
                        rebuild => undef,
                },
                aggregateinternal => {
                        type => "boolean",
                        example => 1,
                        description => "enable aggregation to internal pages?",
                        safe => 0, # enabling needs manual transition
                        rebuild => 0,
                },
                aggregate_webtrigger => {
                        type => "boolean",
                        example => 0,
                        description => "allow aggregation to be triggered via the web?",
                        safe => 1,
                        rebuild => 0,
                },
                cookiejar => {
                        type => "string",
                        example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
                        safe => 0, # hooks into perl module internals
                        description => "cookie control",
                },
}

sub checkconfig () {
        if (! defined $config{aggregateinternal}) {
                $config{aggregateinternal}=1;
        }
        if (! defined $config{cookiejar}) {
                $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
        }

        # This is done here rather than in a refresh hook because it
        # needs to run before the wiki is locked.
        if ($config{aggregate} && ! ($config{post_commit} &&
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
        }
}

sub cgi ($) {
        my $cgi=shift;

        if (defined $cgi->param('do') &&
            $cgi->param("do") eq "aggregate_webtrigger") {
                $|=1;
                print "Content-Type: text/plain\n\n";
                $config{cgi}=0;
                $config{verbose}=1;
                $config{syslog}=0;
                print gettext("Aggregation triggered via web.")."\n\n";
                if (launchaggregation()) {
                        IkiWiki::lockwiki();
                        IkiWiki::loadindex();
                        require IkiWiki::Render;
                        IkiWiki::refresh();
                        IkiWiki::saveindex();
                }
                else {
                        print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
                }
                exit 0;
        }
}

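# Checks whether any feeds are due for an update; if so, takes the
# aggregation lock, forks a child to do the fetching, and waits for it.
# Returns true if aggregation ran, false if nothing needed doing.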
sub launchaggregation () {
        # See if any feeds need aggregation.
        loadstate();
        my @feeds=needsaggregate();
        return unless @feeds;
        if (! lockaggregate()) {
                error("an aggregation process is already running");
        }
        # force a later rebuild of source pages
        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
                foreach @feeds;

        # Fork a child process to handle the aggregation.
        # The parent process will then handle building the
        # result. This avoids messy code to clear state
        # accumulated while aggregating.
        defined(my $pid = fork) or error("Can't fork: $!");
        if (! $pid) {
                IkiWiki::loadindex();
                # Aggregation happens without the main wiki lock
                # being held. This allows editing pages etc while
                # aggregation is running.
                aggregate(@feeds);

                IkiWiki::lockwiki;
                # Merge changes, since aggregation state may have
                # changed on disk while the aggregation was happening.
                mergestate();
                expire();
                savestate();
                IkiWiki::unlockwiki;
                exit 0;
        }
        waitpid($pid,0);
        if ($?) {
                error "aggregation failed with code $?";
        }

        clearstate();
        unlockaggregate();

        return 1;
}

#  Pages with extension _aggregated have plain html markup, pass through.
sub htmlize (@) {
        my %params=@_;
        return $params{content};
}

# Used by ikiwiki-transition aggregateinternal.
sub migrate_to_internal {
        if (! lockaggregate()) {
                error("an aggregation process is currently running");
        }

        IkiWiki::lockwiki();
        loadstate();
        $config{verbose}=1;

        foreach my $data (values %guids) {
                next unless $data->{page};
                next if $data->{expired};

                $config{aggregateinternal} = 0;
                my $oldname = "$config{srcdir}/".htmlfn($data->{page});
                if (! -e $oldname) {
                        $oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
                }

                my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});

                $config{aggregateinternal} = 1;
                my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});

                debug "moving $oldname -> $newname";
                if (-e $newname) {
                        if (-e $oldname) {
                                error("$newname already exists");
                        }
                        else {
                                debug("already renamed to $newname?");
                        }
                }
                elsif (-e $oldname) {
                        rename($oldname, $newname) || error("$!");
                }
                else {
                        debug("$oldname not found");
                }
                if (-e $oldoutput) {
                        require IkiWiki::Render;
                        debug("removing output file $oldoutput");
                        IkiWiki::prune($oldoutput, $config{destdir});
                }
        }

        savestate();
        IkiWiki::unlockwiki;

        unlockaggregate();
}

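# Hook: when pages are about to be rebuilt, flag the feeds whose source
# page is in the rebuild list, so that feeds no longer present can later
# be garbage collected.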
sub needsbuild (@) {
        my $needsbuild=shift;

        loadstate();

        foreach my $feed (values %feeds) {
                if (exists $pagesources{$feed->{sourcepage}} &&
                    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
                        # Mark all feeds originating on this page as
                        # not yet seen; preprocess will unmark those that
                        # still exist.
                        markunseen($feed->{sourcepage});
                }
        }

        return $needsbuild;
}

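# Handles the [[!aggregate]] directive: registers or updates the feed it
# describes, and returns a short HTML status line for the feed.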
sub preprocess (@) {
        my %params=@_;

        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
                        error sprintf(gettext("missing %s parameter"), $required)
                }
        }

        my $feed={};
        my $name=$params{name};
        if (exists $feeds{$name}) {
                $feed=$feeds{$name};
        }
        else {
                $feeds{$name}=$feed;
        }
        $feed->{name}=$name;
        $feed->{sourcepage}=$params{page};
        $feed->{url}=$params{url};
        my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
        $dir=~s/^\/+//;
        ($dir)=$dir=~/$config{wiki_file_regexp}/;
        $feed->{dir}=$dir;
        $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
        $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
        $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
        $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
        if (exists $params{template}) {
                $params{template}=~s/[^-_a-zA-Z0-9]+//g;
        }
        else {
                $params{template} = "aggregatepost"
        }
        $feed->{template}=$params{template} . ".tmpl";
        delete $feed->{unseen};
        $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
        $feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
        $feed->{numposts}=0 unless defined $feed->{numposts};
        $feed->{newposts}=0 unless defined $feed->{newposts};
        $feed->{message}=gettext("new feed") unless defined $feed->{message};
        $feed->{error}=0 unless defined $feed->{error};
        $feed->{tags}=[];
        while (@_) {
                my $key=shift;
                my $value=shift;
                if ($key eq 'tag') {
                        push @{$feed->{tags}}, $value;
                }
        }

        return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
               ($feed->{error} ? "<em>" : "").$feed->{message}.
               ($feed->{error} ? "</em>" : "").
               " (".$feed->{numposts}." ".gettext("posts").
               ($feed->{newposts} ? "; ".$feed->{newposts}.
                                    " ".gettext("new") : "").
               ")";
}

sub delete (@) {
        my @files=@_;

        # Remove feed data for removed pages.
        foreach my $file (@files) {
                my $page=pagename($file);
                markunseen($page);
        }
}

sub markunseen ($) {
        my $page=shift;

        foreach my $id (keys %feeds) {
                if ($feeds{$id}->{sourcepage} eq $page) {
                        $feeds{$id}->{unseen}=1;
                }
        }
}

my $state_loaded=0;

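# On-disk state format: one line per feed or guid, written as
# space-separated field=value pairs; free-form text fields (name, feed,
# guid, message) are entity-encoded, and tags are repeated tag= pairs.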
sub loadstate () {
        return if $state_loaded;
        $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
                open(IN, "<", "$config{wikistatedir}/aggregate") ||
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
                        my $data={};
                        foreach my $i (split(/ /, $_)) {
                                my ($field, $val)=split(/=/, $i, 2);
                                if ($field eq "name" || $field eq "feed" ||
                                    $field eq "guid" || $field eq "message") {
                                        $data->{$field}=decode_entities($val, " \t\n");
                                }
                                elsif ($field eq "tag") {
                                        push @{$data->{tags}}, $val;
                                }
                                else {
                                        $data->{$field}=$val;
                                }
                        }

                        if (exists $data->{name}) {
                                $feeds{$data->{name}}=$data;
                        }
                        elsif (exists $data->{guid}) {
                                $guids{$data->{guid}}=$data;
                        }
                }

                close IN;
        }
}

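# Atomically rewrites the state file via rename(), then writes an
# "aggregatetime" file holding the earliest time any feed is next due
# (presumably for use by external schedulers deciding when to run
# aggregation again).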
sub savestate () {
        return unless $state_loaded;
        garbage_collect();
        my $newfile="$config{wikistatedir}/aggregate.new";
        my $cleanup = sub { unlink($newfile) };
        open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                my @line;
                foreach my $field (keys %$data) {
                        if ($field eq "name" || $field eq "feed" ||
                            $field eq "guid" || $field eq "message") {
                                push @line, "$field=".encode_entities($data->{$field}, " \t\n");
                        }
                        elsif ($field eq "tags") {
                                push @line, "tag=$_" foreach @{$data->{tags}};
                        }
                        else {
                                push @line, "$field=".$data->{$field}
                                        if defined $data->{$field};
                        }
                }
                # low-precedence "or", so a failed print is actually caught
                print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
        }
        close OUT || error("save $newfile: $!", $cleanup);
        rename($newfile, "$config{wikistatedir}/aggregate") ||
                error("rename $newfile: $!", $cleanup);

        my $timestamp=undef;
        foreach my $feed (keys %feeds) {
                my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
                if (! defined $timestamp || $timestamp > $t) {
                        $timestamp=$t;
                }
        }
        $newfile=~s/\.new$/time/;
        open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
        if (defined $timestamp) {
                print OUT $timestamp."\n";
        }
        close OUT || error("save $newfile: $!", $cleanup);
}

sub garbage_collect () {
        foreach my $name (keys %feeds) {
                # remove any feeds that were not seen while building the pages
                # that used to contain them
                if ($feeds{$name}->{unseen}) {
                        delete $feeds{$name};
                }
        }

        foreach my $guid (values %guids) {
                # any guid whose feed is gone should be removed
                if (! exists $feeds{$guid->{feed}}) {
                        if (exists $guid->{page}) {
                                unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
                                        || unlink "$config{srcdir}/".htmlfn($guid->{page});
                        }
                        delete $guids{$guid->{guid}};
                }
                # handle expired guids
                elsif ($guid->{expired} && exists $guid->{page}) {
                        unlink "$config{srcdir}/".htmlfn($guid->{page});
                        unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                        delete $guid->{page};
                        delete $guid->{md5};
                }
        }
}

sub mergestate () {
        # Load the current state in from disk, and merge into it
        # values from the state in memory that might have changed
        # during aggregation.
        my %myfeeds=%feeds;
        my %myguids=%guids;
        clearstate();
        loadstate();

        # All that can change in feed state during aggregation is a few
        # fields.
        foreach my $name (keys %myfeeds) {
                if (exists $feeds{$name}) {
                        foreach my $field (qw{message lastupdate lasttry
                                              numposts newposts error}) {
                                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
                        }
                }
        }

        # New guids can be created during aggregation.
        # Guids have a few fields that may be updated during aggregation.
        # It's also possible that guids were removed from the on-disk state
        # while the aggregation was in process. That would only happen if
        # their feed was also removed, so any removed guids added back here
        # will be garbage collected later.
        foreach my $guid (keys %myguids) {
                if (! exists $guids{$guid}) {
                        $guids{$guid}=$myguids{$guid};
                }
                else {
                        foreach my $field (qw{md5}) {
                                $guids{$guid}->{$field}=$myguids{$guid}->{$field};
                        }
                }
        }
}

sub clearstate () {
        %feeds=();
        %guids=();
        $state_loaded=0;
}

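# Marks old feed items as expired, either by age (expireage, in days) or
# by count (expirecount, keeping only the newest distinct pages), working
# through each feed's items newest first.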
sub expire () {
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
                my $count=0;
                my %seen;
                foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
                                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
                                  values %guids) {
                        if ($feed->{expireage}) {
                                my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
                                if ($days_old > $feed->{expireage}) {
                                        debug(sprintf(gettext("expiring %s (%s days old)"),
                                                $item->{page}, int($days_old)));
                                        $item->{expired}=1;
                                }
                        }
                        elsif ($feed->{expirecount} &&
                               $count >= $feed->{expirecount}) {
                                debug(sprintf(gettext("expiring %s"), $item->{page}));
                                $item->{expired}=1;
                        }
                        else {
                                if (! $seen{$item->{page}}) {
                                        $seen{$item->{page}}=1;
                                        $count++;
                                }
                        }
                }
        }
}

sub needsaggregate () {
        return values %feeds if $config{rebuild};
        return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
}

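# Fetches and parses each given feed, with fallbacks for feeds containing
# invalid UTF-8 or badly escaped entities, then adds a page per entry.
# Errors are recorded in the feed's message/error fields rather than
# aborting the whole run.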
sub aggregate (@) {
        eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
        error($@) if $@;

        foreach my $feed (@_) {
                $feed->{lasttry}=time;
                $feed->{newposts}=0;
                $feed->{message}=sprintf(gettext("last checked %s"),
                        displaytime($feed->{lasttry}));
                $feed->{error}=0;

                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
                                $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
                my $res=URI::Fetch->fetch($feed->{feedurl},
                        UserAgent => LWP::UserAgent->new(
                                cookie_jar => $config{cookiejar},
                        ),
                );
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }

                # lastupdate is only set if we were able to contact the server
                $feed->{lastupdate}=$feed->{lasttry};

                if ($res->status == URI::Fetch::URI_GONE()) {
                        $feed->{message}=gettext("feed not found");
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                my $content=$res->content;
                my $f=eval{XML::Feed->parse(\$content)};
                if ($@) {
                        # One common cause of XML::Feed crashing is a feed
                        # that contains invalid UTF-8 sequences. Convert
                        # feed to ascii to try to work around.
                        $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
                        $f=eval {
                                $content=Encode::decode_utf8($content, 0);
                                XML::Feed->parse(\$content)
                        };
                }
                if ($@) {
                        # Another possibility is badly escaped entities.
                        $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
                        $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
                        $f=eval {
                                $content=Encode::decode_utf8($content, 0);
                                XML::Feed->parse(\$content)
                        };
                }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if (! $f) {
                        $feed->{message}=XML::Feed->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }

                foreach my $entry ($f->entries) {
                        # XML::Feed doesn't work around XML::Atom's bizarre
                        # API, so we will. Real unicode strings? Yes please.
                        # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
                        local $XML::Atom::ForceUnicode = 1;

                        my $c=$entry->content;
                        # atom feeds may have no content, only a summary
                        if (! defined $c && ref $entry->summary) {
                                $c=$entry->summary;
                        }

                        add_page(
                                feed => $feed,
                                copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                author => defined $entry->author ? decode_entities($entry->author) : "",
                                link => $entry->link,
                                content => (defined $c && defined $c->body) ? $c->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                                base => (defined $c && $c->can("base")) ? $c->base : undef,
                        );
                }
        }
}

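# Creates or updates the page for a single feed item, picking an unused
# page name for new items (falling back to a generic "item" name if the
# title-derived one is unusable).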
sub add_page (@) {
        my %params=@_;

        my $feed=$params{feed};
        my $guid={};
        my $mtime;
        if (exists $guids{$params{guid}}) {
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
                write_page($feed, $guid, $mtime, \%params);
        }
        else {
                # new post
                $guid->{guid}=$params{guid};
                $guids{$params{guid}}=$guid;
                $mtime=$params{ctime};
                $feed->{numposts}++;
                $feed->{newposts}++;

                # assign it an unused page
                my $page=titlepage($params{title});
                # escape slashes and periods in title so it doesn't specify
                # directory name or trigger ".." disallowing code.
                $page=~s!([/.])!"__".ord($1)."__"!eg;
                $page=$feed->{dir}."/".$page;
                ($page)=$page=~/$config{wiki_file_regexp}/;
                if (! defined $page || ! length $page) {
                        $page=$feed->{dir}."/item";
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
                       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
                       -e "$config{srcdir}/".htmlfn($page.$c)) {
                        $c++
                }

                $guid->{page}=$page;
                eval { write_page($feed, $guid, $mtime, \%params) };
                if ($@) {
                        # assume failure was due to a too long filename
                        # (or o
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
                              -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
                              -e "$config{srcdir}/".htmlfn($page.$c)) {
                                $c++
                        }

                        $guid->{page}=$page;
                        write_page($feed, $guid, $mtime, \%params);
                }

                debug(sprintf(gettext("creating new page %s"), $page));
        }
}

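# Renders an item through the feed's template into the transient underlay,
# skipping items whose content is unchanged (compared by MD5 digest).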
sub write_page ($$$$) {
        my $feed=shift;
        my $guid=shift;
        my $mtime=shift;
        my %params=%{shift()};

        $guid->{feed}=$feed->{name};

        # To write or not to write? Need to avoid writing unchanged pages
        # to avoid unnecessary rebuilding. The mtime from rss cannot be
        # trusted; let's use a digest.
        eval q{use Digest::MD5 'md5_hex'};
        error($@) if $@;
        require Encode;
        my $digest=md5_hex(Encode::encode_utf8($params{content}));
        return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
        $guid->{md5}=$digest;

        # Create the page.
        my $template;
        eval {
                $template=template($feed->{template}, blind_cache => 1);
        };
        if ($@) {
                print STDERR gettext("failed to process template:")." $@";
                return;
        }
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
        $template->param(author => $params{author})
                if defined $params{author} && length($params{author})
                        && $params{author} ne $feed->{name};
        $template->param(content => wikiescape(htmlabs($params{content},
                defined $params{base} ? $params{base} : $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
        $template->param(copyright => $params{copyright})
                if defined $params{copyright} && length $params{copyright};
        $template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
        writefile(htmlfn($guid->{page}),
                $IkiWiki::Plugin::transient::transientdir, $template->output);

        if (defined $mtime && $mtime <= time) {
                # Set the mtime, this lets the build process get the right
                # creation time on record for the new page.
                utime $mtime, $mtime,
                        $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                # Store it in pagectime for expiry code to use also.
                $IkiWiki::pagectime{$guid->{page}}=$mtime
                        unless exists $IkiWiki::pagectime{$guid->{page}};
        }
        else {
                # Dummy value for expiry code.
                $IkiWiki::pagectime{$guid->{page}}=time
                        unless exists $IkiWiki::pagectime{$guid->{page}};
        }
}

sub wikiescape ($) {
        # escape accidental wikilinks and preprocessor stuff
        return encode_entities(shift, '\[\]');
}

sub htmlabs ($$) {
        # Convert links in html from relative to absolute.
        # Note that this is a heuristic, which is not specified by the rss
        # spec and may not be right for all feeds. Also, see Debian
        # bug #381359.
        my $html=shift;
        my $urlbase=shift;

        my $ret="";
        my $p = HTML::Parser->new(api_version => 3);
        $p->handler(default => sub { $ret.=join("", @_) }, "text");
        $p->handler(start => sub {
                my ($tagname, $pos, $text) = @_;
                if (ref $HTML::Tagset::linkElements{$tagname}) {
                        while (4 <= @$pos) {
                                # use attribute sets from right to left
                                # to avoid invalidating the offsets
                                # when replacing the values
                                my($k_offset, $k_len, $v_offset, $v_len) =
                                        splice(@$pos, -4);
                                my $attrname = lc(substr($text, $k_offset, $k_len));
                                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
                                next unless $v_offset; # 0 v_offset means no value
                                my $v = substr($text, $v_offset, $v_len);
                                $v =~ s/^([\'\"])(.*)\1$/$2/;
                                my $new_v=IkiWiki::urlabs($v, $urlbase);
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }
                }
                $ret.=$text;
        }, "tagname, tokenpos, text");
        $p->parse($html);
        $p->eof;

        return $ret;
}

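# Filename for an aggregated page: the ._aggregated extension when
# aggregating to internal pages, otherwise the configured html extension.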
sub htmlfn ($) {
        return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
}

my $aggregatelock;

sub lockaggregate () {
        # Take an exclusive lock to prevent multiple concurrent aggregators.
        # Returns true if the lock was acquired.
        if (! -d $config{wikistatedir}) {
                mkdir($config{wikistatedir});
        }
        open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
                error ("cannot open $config{wikistatedir}/aggregatelock: $!");
        if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
                close($aggregatelock) || error("failed closing aggregatelock: $!");
                return 0;
        }
        return 1;
}

sub unlockaggregate () {
        return close($aggregatelock) if $aggregatelock;
        return;
}

1