#!/usr/bin/perl
# Feed aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 3.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use open qw{:utf8 :std};

my %feeds;
my %guids;

sub import {
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "getsetup", id => "aggregate", call => \&getsetup);
        hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
        hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "savestate", id => "aggregate", call => \&savestate);
        hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
        if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
                hook(type => "cgi", id => "aggregate", call => \&cgi);
        }
}

sub getopt () {
        eval q{use Getopt::Long};
        error($@) if $@;
        Getopt::Long::Configure('pass_through');
        GetOptions(
                "aggregate" => \$config{aggregate},
                "aggregateinternal!" => \$config{aggregateinternal},
        );
}

sub getsetup () {
        return
                plugin => {
                        safe => 1,
                        rebuild => undef,
                },
                aggregateinternal => {
                        type => "boolean",
                        example => 1,
                        description => "enable aggregation to internal pages?",
                        safe => 0, # enabling needs manual transition
                        rebuild => 0,
                },
                aggregate_webtrigger => {
                        type => "boolean",
                        example => 0,
                        description => "allow aggregation to be triggered via the web?",
                        safe => 1,
                        rebuild => 0,
                },
                cookiejar => {
                        type => "string",
                        example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
                        safe => 0, # hooks into perl module internals
                        description => "cookie control",
                },
}
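# Illustrative setup-file snippet for this plugin (hypothetical paths and
# values; the cookiejar hash is handed directly to LWP::UserAgent's
# cookie_jar option):
#
#   add_plugins => [qw{aggregate}],
#   aggregate_webtrigger => 1,
#   cookiejar => { file => "/home/user/.ikiwiki/cookies" },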
sub checkconfig () {
        if (! defined $config{aggregateinternal}) {
                $config{aggregateinternal}=1;
        }
        if (! defined $config{cookiejar}) {
                $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
        }

        if ($config{aggregate} && ! ($config{post_commit} &&
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
        }
}
sub cgi ($) {
        my $cgi=shift;

        if (defined $cgi->param('do') &&
            $cgi->param("do") eq "aggregate_webtrigger") {
                $|=1;
                print "Content-Type: text/plain\n\n";
                $config{cgi}=0;
                $config{verbose}=1;
                $config{syslog}=0;
                print gettext("Aggregation triggered via web.")."\n\n";
                if (launchaggregation()) {
                        IkiWiki::lockwiki();
                        IkiWiki::loadindex();
                        require IkiWiki::Render;
                        IkiWiki::refresh();
                        IkiWiki::saveindex();
                }
                else {
                        print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
                }
                exit 0;
        }
}
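# When aggregate_webtrigger is enabled, aggregation can be requested remotely
# by fetching the wiki's CGI with do=aggregate_webtrigger, for example
# (hypothetical CGI URL):
#   https://example.com/ikiwiki.cgi?do=aggregate_webtrigger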
sub launchaggregation () {
        # See if any feeds need aggregation.
        loadstate();
        my @feeds=needsaggregate();
        return unless @feeds;
        if (! lockaggregate()) {
                debug("an aggregation process is already running");
                return;
        }
        # force a later rebuild of source pages
        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
                foreach @feeds;

        # Fork a child process to handle the aggregation.
        # The parent process will then handle building the
        # result. This avoids messy code to clear state
        # accumulated while aggregating.
        defined(my $pid = fork) or error("Can't fork: $!");
        if (! $pid) {
                IkiWiki::loadindex();
                # Aggregation happens without the main wiki lock
                # being held. This allows editing pages etc while
                # aggregation is running.
                aggregate(@feeds);

                IkiWiki::lockwiki;
                # Merge changes, since aggregation state may have
                # changed on disk while the aggregation was happening.
                mergestate();
                expire();
                savestate();
                IkiWiki::unlockwiki;
                exit 0;
        }
        waitpid($pid,0);
        if ($?) {
                error "aggregation failed with code $?";
        }

        clearstate();
        unlockaggregate();

        return 1;
}

# Pages with extension _aggregated have plain html markup, pass through.
sub htmlize (@) {
        my %params=@_;
        return $params{content};
}
# Used by ikiwiki-transition aggregateinternal.
sub migrate_to_internal {
        if (! lockaggregate()) {
                error("an aggregation process is currently running");
        }

        IkiWiki::lockwiki();
        loadstate();
        $config{verbose}=1;

        foreach my $data (values %guids) {
                next unless $data->{page};
                next if $data->{expired};

                $config{aggregateinternal} = 0;
                my $oldname = "$config{srcdir}/".htmlfn($data->{page});
                if (! -e $oldname) {
                        $oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
                }

                my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});

                $config{aggregateinternal} = 1;
                my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});

                debug "moving $oldname -> $newname";
                if (-e $newname) {
                        if (-e $oldname) {
                                error("$newname already exists");
                        }
                        else {
                                debug("already renamed to $newname?");
                        }
                }
                elsif (-e $oldname) {
                        rename($oldname, $newname) || error("$!");
                }
                else {
                        debug("$oldname not found");
                }
                if (-e $oldoutput) {
                        require IkiWiki::Render;
                        debug("removing output file $oldoutput");
                        IkiWiki::prune($oldoutput);
                }
        }

        savestate();
        IkiWiki::unlockwiki;

        unlockaggregate();
}

sub needsbuild (@) {
        my $needsbuild=shift;

        loadstate();

        foreach my $feed (values %feeds) {
                if (exists $pagesources{$feed->{sourcepage}} &&
                    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
                        # Mark all feeds originating on this page as
                        # not yet seen; preprocess will unmark those that
                        # still exist.
                        markunseen($feed->{sourcepage});
                }
        }

        return $needsbuild;
}
sub preprocess (@) {
        my %params=@_;

        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
                        error sprintf(gettext("missing %s parameter"), $required)
                }
        }

        my $feed={};
        my $name=$params{name};
        if (exists $feeds{$name}) {
                $feed=$feeds{$name};
        }
        else {
                $feeds{$name}=$feed;
        }
        $feed->{name}=$name;
        $feed->{sourcepage}=$params{page};
        $feed->{url}=$params{url};
        my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
        $dir=~s/^\/+//;
        ($dir)=$dir=~/$config{wiki_file_regexp}/;
        $feed->{dir}=$dir;
        $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
        $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
        $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
        $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
        if (exists $params{template}) {
                $params{template}=~s/[^-_a-zA-Z0-9]+//g;
        }
        else {
                $params{template} = "aggregatepost"
        }
        $feed->{template}=$params{template} . ".tmpl";
        delete $feed->{unseen};
        $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
        $feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
        $feed->{numposts}=0 unless defined $feed->{numposts};
        $feed->{newposts}=0 unless defined $feed->{newposts};
        $feed->{message}=gettext("new feed") unless defined $feed->{message};
        $feed->{error}=0 unless defined $feed->{error};
        $feed->{tags}=[];
        while (@_) {
                my $key=shift;
                my $value=shift;
                if ($key eq 'tag') {
                        push @{$feed->{tags}}, $value;
                }
        }

        return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
               ($feed->{error} ? "<em>" : "").$feed->{message}.
               ($feed->{error} ? "</em>" : "").
               " (".$feed->{numposts}." ".gettext("posts").
               ($feed->{newposts} ? "; ".$feed->{newposts}.
                                    " ".gettext("new") : "").
               ")";
}
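# Typical use of the directive on a wiki page, with illustrative values
# (updateinterval is in minutes, expireage in days):
#   [[!aggregate name="Example Blog" url="http://example.com/"
#   feedurl="http://example.com/index.rss" updateinterval="60"
#   expireage="30" tag="news"]]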
sub delete (@) {
        my @files=@_;

        # Remove feed data for removed pages.
        foreach my $file (@files) {
                my $page=pagename($file);
                markunseen($page);
        }
}

sub markunseen ($) {
        my $page=shift;

        foreach my $id (keys %feeds) {
                if ($feeds{$id}->{sourcepage} eq $page) {
                        $feeds{$id}->{unseen}=1;
                }
        }
}
my $state_loaded=0;

sub loadstate () {
        return if $state_loaded;
        $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
                open(IN, "<", "$config{wikistatedir}/aggregate") ||
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
                        my $data={};
                        foreach my $i (split(/ /, $_)) {
                                my ($field, $val)=split(/=/, $i, 2);
                                if ($field eq "name" || $field eq "feed" ||
                                    $field eq "guid" || $field eq "message") {
                                        $data->{$field}=decode_entities($val, " \t\n");
                                }
                                elsif ($field eq "tag") {
                                        push @{$data->{tags}}, $val;
                                }
                                else {
                                        $data->{$field}=$val;
                                }
                        }

                        if (exists $data->{name}) {
                                $feeds{$data->{name}}=$data;
                        }
                        elsif (exists $data->{guid}) {
                                $guids{$data->{guid}}=$data;
                        }
                }

                close IN;
        }
}
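# The state file written by savestate() and read above holds one record per
# line as space-separated field=value pairs; whitespace in the free-form
# fields (name, feed, guid, message) is entity-encoded so it cannot collide
# with the separators. Feed records carry a "name" field, guid records a
# "guid" field. An illustrative guid line:
#   guid=http://example.com/post/1 feed=Example page=feeds/example/post_1 md5=...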
sub savestate () {
        return unless $state_loaded;
        garbage_collect();
        my $newfile="$config{wikistatedir}/aggregate.new";
        my $cleanup = sub { unlink($newfile) };
        open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                my @line;
                foreach my $field (keys %$data) {
                        if ($field eq "name" || $field eq "feed" ||
                            $field eq "guid" || $field eq "message") {
                                push @line, "$field=".encode_entities($data->{$field}, " \t\n");
                        }
                        elsif ($field eq "tags") {
                                push @line, "tag=$_" foreach @{$data->{tags}};
                        }
                        else {
                                push @line, "$field=".$data->{$field}
                                        if defined $data->{$field};
                        }
                }
                print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
        }
        close OUT || error("save $newfile: $!", $cleanup);
        rename($newfile, "$config{wikistatedir}/aggregate") ||
                error("rename $newfile: $!", $cleanup);

        my $timestamp=undef;
        foreach my $feed (keys %feeds) {
                my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
                if (! defined $timestamp || $timestamp > $t) {
                        $timestamp=$t;
                }
        }
        $newfile=~s/\.new$/time/;
        open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
        if (defined $timestamp) {
                print OUT $timestamp."\n";
        }
        close OUT || error("save $newfile: $!", $cleanup);
}
sub garbage_collect () {
        foreach my $name (keys %feeds) {
                # remove any feeds that were not seen while building the pages
                # that used to contain them
                if ($feeds{$name}->{unseen}) {
                        delete $feeds{$name};
                }
        }

        foreach my $guid (values %guids) {
                # any guid whose feed is gone should be removed
                if (! exists $feeds{$guid->{feed}}) {
                        if (exists $guid->{page}) {
                                unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
                                        || unlink "$config{srcdir}/".htmlfn($guid->{page});
                        }
                        delete $guids{$guid->{guid}};
                }
                # handle expired guids
                elsif ($guid->{expired} && exists $guid->{page}) {
                        unlink "$config{srcdir}/".htmlfn($guid->{page});
                        unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                        delete $guid->{page};
                        delete $guid->{md5};
                }
        }
}

sub mergestate () {
        # Load the current state in from disk, and merge into it
        # values from the state in memory that might have changed
        # during aggregation.
        my %myfeeds=%feeds;
        my %myguids=%guids;
        clearstate();
        loadstate();

        # All that can change in feed state during aggregation is a few
        # fields.
        foreach my $name (keys %myfeeds) {
                if (exists $feeds{$name}) {
                        foreach my $field (qw{message lastupdate lasttry
                                              numposts newposts error}) {
                                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
                        }
                }
        }

        # New guids can be created during aggregation.
        # Guids have a few fields that may be updated during aggregation.
        # It's also possible that guids were removed from the on-disk state
        # while the aggregation was in process. That would only happen if
        # their feed was also removed, so any removed guids added back here
        # will be garbage collected later.
        foreach my $guid (keys %myguids) {
                if (! exists $guids{$guid}) {
                        $guids{$guid}=$myguids{$guid};
                }
                else {
                        foreach my $field (qw{md5}) {
                                $guids{$guid}->{$field}=$myguids{$guid}->{$field};
                        }
                }
        }
}

sub clearstate () {
        %feeds=();
        %guids=();
        $state_loaded=0;
}

sub expire () {
        foreach my $feed (values %feeds) {
                next unless $feed->{expireage} || $feed->{expirecount};
                my $count=0;
                my %seen;
                foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
                                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
                                  values %guids) {
                        if ($feed->{expireage}) {
                                my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
                                if ($days_old > $feed->{expireage}) {
                                        debug(sprintf(gettext("expiring %s (%s days old)"),
                                                $item->{page}, int($days_old)));
                                        $item->{expired}=1;
                                }
                        }
                        elsif ($feed->{expirecount} &&
                               $count >= $feed->{expirecount}) {
                                debug(sprintf(gettext("expiring %s"), $item->{page}));
                                $item->{expired}=1;
                        }
                        else {
                                if (! $seen{$item->{page}}) {
                                        $seen{$item->{page}}=1;
                                        $count++;
                                }
                        }
                }
        }
}

sub needsaggregate () {
        return values %feeds if $config{rebuild};
        return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
}
sub aggregate (@) {
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
        error($@) if $@;

        foreach my $feed (@_) {
                $feed->{lasttry}=time;
                $feed->{newposts}=0;
                $feed->{message}=sprintf(gettext("last checked %s"),
                        displaytime($feed->{lasttry}));
                $feed->{error}=0;

                debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
                                $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
                my $res=URI::Fetch->fetch($feed->{feedurl},
                        UserAgent => LWP::UserAgent->new(
                                cookie_jar => $config{cookiejar},
                        ),
                );
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }

                # lastupdate is only set if we were able to contact the server
                $feed->{lastupdate}=$feed->{lasttry};

                if ($res->status == URI::Fetch::URI_GONE()) {
                        $feed->{message}=gettext("feed not found");
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                my $content=$res->content;
                my $f=eval{XML::Feed->parse(\$content)};
                if ($@) {
                        # One common cause of XML::Feed crashing is a feed
                        # that contains invalid UTF-8 sequences. Convert
                        # feed to ascii to try to work around.
                        $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
                        $f=eval {
                                $content=Encode::decode_utf8($content, 0);
                                XML::Feed->parse(\$content)
                        };
                }
                if ($@) {
                        # Another possibility is badly escaped entities.
                        $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
                        $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
                        $f=eval {
                                $content=Encode::decode_utf8($content, 0);
                                XML::Feed->parse(\$content)
                        };
                }
                if ($@) {
                        $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }
                if (! $f) {
                        $feed->{message}=XML::Feed->errstr;
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
                }

                foreach my $entry ($f->entries) {
                        # XML::Feed doesn't work around XML::Atom's bizarre
                        # API, so we will. Real unicode strings? Yes please.
                        # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
                        local $XML::Atom::ForceUnicode = 1;

                        my $c=$entry->content;
                        # atom feeds may have no content, only a summary
                        if (! defined $c && ref $entry->summary) {
                                $c=$entry->summary;
                        }

                        add_page(
                                feed => $feed,
                                copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
                                content => (defined $c && defined $c->body) ? $c->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                                base => (defined $c && $c->can("base")) ? $c->base : undef,
                        );
                }
        }
}
sub add_page (@) {
        my %params=@_;

        my $feed=$params{feed};
        my $guid={};
        my $mtime;
        if (exists $guids{$params{guid}}) {
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
        }
        else {
                # new post
                $guid->{guid}=$params{guid};
                $guids{$params{guid}}=$guid;
                $mtime=$params{ctime};
                $feed->{numposts}++;
                $feed->{newposts}++;

                # assign it an unused page
                my $page=titlepage($params{title});
                # escape slashes and periods in title so it doesn't specify
                # directory name or trigger ".." disallowing code.
                $page=~s!([/.])!"__".ord($1)."__"!eg;
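                # (For example, a slash in the title becomes "__47__" and a
                # period becomes "__46__", their ord() values.)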
                $page=$feed->{dir}."/".$page;
                ($page)=$page=~/$config{wiki_file_regexp}/;
                if (! defined $page || ! length $page) {
                        $page=$feed->{dir}."/item";
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
                       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
                       -e "$config{srcdir}/".htmlfn($page.$c)) {
                        $c++
                }

                # Make sure that the file name isn't too long.
                # NB: This doesn't check for path length limits.
                my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
                if (defined $max && length(htmlfn($page)) >= $max) {
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
                               -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
                               -e "$config{srcdir}/".htmlfn($page.$c)) {
                                $c++
                        }
                }

                $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
        $guid->{feed}=$feed->{name};

        # To write or not to write? Need to avoid writing unchanged pages
        # to avoid unnecessary rebuilding. The mtime from rss cannot be
        # trusted; let's use a digest.
        eval q{use Digest::MD5 'md5_hex'};
        error($@) if $@;
        require Encode;
        my $digest=md5_hex(Encode::encode_utf8($params{content}));
        return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
        $guid->{md5}=$digest;

        # Create the page.
        my $template;
        eval {
                $template=template($feed->{template}, blind_cache => 1);
        };
        if ($@) {
                print STDERR gettext("failed to process template:")." $@";
                return;
        }
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
        $template->param(content => wikiescape(htmlabs($params{content},
                defined $params{base} ? $params{base} : $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
        $template->param(copyright => $params{copyright})
                if defined $params{copyright} && length $params{copyright};
        $template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
        writefile(htmlfn($guid->{page}),
                $IkiWiki::Plugin::transient::transientdir, $template->output);

        if (defined $mtime && $mtime <= time) {
                # Set the mtime, this lets the build process get the right
                # creation time on record for the new page.
                utime $mtime, $mtime,
                        $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                # Store it in pagectime for expiry code to use also.
                $IkiWiki::pagectime{$guid->{page}}=$mtime
                        unless exists $IkiWiki::pagectime{$guid->{page}};
        }
        else {
                # Dummy value for expiry code.
                $IkiWiki::pagectime{$guid->{page}}=time
                        unless exists $IkiWiki::pagectime{$guid->{page}};
        }
}
sub wikiescape ($) {
        # escape accidental wikilinks and preprocessor stuff
        return encode_entities(shift, '\[\]');
}

sub htmlabs ($$) {
        # Convert links in html from relative to absolute.
        # Note that this is a heuristic, which is not specified by the rss
        # spec and may not be right for all feeds. Also, see Debian
        # bug #381359.
        my $html=shift;
        my $urlbase=shift;

        my $ret="";
        my $p = HTML::Parser->new(api_version => 3);
        $p->handler(default => sub { $ret.=join("", @_) }, "text");
        $p->handler(start => sub {
                my ($tagname, $pos, $text) = @_;
                if (ref $HTML::Tagset::linkElements{$tagname}) {
                        while (4 <= @$pos) {
                                # use attribute sets from right to left
                                # to avoid invalidating the offsets
                                # when replacing the values
                                my($k_offset, $k_len, $v_offset, $v_len) =
                                        splice(@$pos, -4);
                                my $attrname = lc(substr($text, $k_offset, $k_len));
                                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
                                next unless $v_offset; # 0 v_offset means no value
                                my $v = substr($text, $v_offset, $v_len);
                                $v =~ s/^([\'\"])(.*)\1$/$2/;
                                my $new_v=IkiWiki::urlabs($v, $urlbase);
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }
                }
                $ret.=$text;
        }, "tagname, tokenpos, text");
        $p->parse($html);
        $p->eof;

        return $ret;
}
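# As an illustration of htmlabs(): given a hypothetical base of
# http://example.com/blog/feed, a relative link such as <a href="/posts/1">
# in an entry's HTML is rewritten to <a href="http://example.com/posts/1">.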
sub htmlfn ($) {
        return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
}

my $aggregatelock;

sub lockaggregate () {
        # Take an exclusive lock to prevent multiple concurrent aggregators.
        # Returns true if the lock was acquired.
        if (! -d $config{wikistatedir}) {
                mkdir($config{wikistatedir});
        }
        open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
                error ("cannot open $config{wikistatedir}/aggregatelock: $!");
        if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
                close($aggregatelock) || error("failed closing aggregatelock: $!");
                return 0;
        }
        return 1;
}

sub unlockaggregate () {
        return close($aggregatelock) if $aggregatelock;
        return;
}

1