diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 2e1ab66e644775faad5b0bcbc231bb5fbd1e0e95..c18784e8b8c64d143c06b469906beb2f329c087b 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
 #!/usr/bin/perl
-# Blog aggregation plugin.
+# Feed aggregation plugin.
 package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
-use IkiWiki;
-use HTML::Entities;
+use IkiWiki 2.00;
 use HTML::Parser;
 use HTML::Tagset;
+use HTML::Entities;
 use URI;
+use open qw{:utf8 :std};
 
 my %feeds;
 my %guids;
 
 sub import { #{{{
        hook(type => "getopt", id => "aggregate", call => \&getopt);
+       hook(type => "getsetup", id => "aggregate", call => \&getsetup);
        hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
-       hook(type => "filter", id => "aggregate", call => \&filter);
+       hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
        hook(type => "savestate", id => "aggregate", call => \&savestate);
+       hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
+       if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
+               hook(type => "cgi", id => "aggregate", call => \&cgi);
+       }
 } # }}}
 
 sub getopt () { #{{{
         eval q{use Getopt::Long};
+       error($@) if $@;
         Getopt::Long::Configure('pass_through');
-        GetOptions("aggregate" => \$config{aggregate});
+        GetOptions(
+               "aggregate" => \$config{aggregate},
+               "aggregateinternal!" => \$config{aggregateinternal},
+       );
+} #}}}
+
+sub getsetup () { #{{{
+       return
+               plugin => {
+                       safe => 1,
+                       rebuild => undef,
+               },
+               aggregateinternal => {
+                       type => "boolean",
+                       example => 0,
+                       description => "enable aggregation to internal pages?",
+                       safe => 0, # enabling needs manual transition
+                       rebuild => 0,
+               },
+               aggregate_webtrigger => {
+                       type => "boolean",
+                       example => 0,
+                       description => "allow aggregation to be triggered via the web?",
+                       safe => 1,
+                       rebuild => 0,
+               },
 } #}}}
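
In setup-file terms, the two new getsetup options might look like this (a sketch; the values and surrounding setup structure are illustrative, not part of this patch):

    # in the wiki's setup file
    aggregate_webtrigger => 1,  # allow ikiwiki.cgi?do=aggregate_webtrigger
    aggregateinternal => 0,     # leave off unless migrated (see migrate_to_internal below)
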
 
 sub checkconfig () { #{{{
-       IkiWiki::lockwiki();
+       if ($config{aggregate} && ! ($config{post_commit} && 
+                                    IkiWiki::commit_hook_enabled())) {
+               launchaggregation();
+       }
+} #}}}
+
+sub cgi ($) { #{{{
+       my $cgi=shift;
+
+       if (defined $cgi->param('do') &&
+           $cgi->param("do") eq "aggregate_webtrigger") {
+               $|=1;
+               print "Content-Type: text/plain\n\n";
+               $config{cgi}=0;
+               $config{verbose}=1;
+               $config{syslog}=0;
+               print gettext("Aggregation triggered via web.")."\n\n";
+               if (launchaggregation()) {
+                       IkiWiki::lockwiki();
+                       IkiWiki::loadindex();
+                       require IkiWiki::Render;
+                       IkiWiki::refresh();
+                       IkiWiki::saveindex();
+               }
+               else {
+                       print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
+               }
+               exit 0;
+       }
+} #}}}
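
With the webtrigger enabled, any HTTP client can kick off aggregation by requesting do=aggregate_webtrigger from the wiki's CGI; the handler above streams progress as text/plain and refreshes the wiki if anything was fetched. A minimal client sketch, assuming a hypothetical wiki URL:

    #!/usr/bin/perl
    # Sketch: hit the aggregation webtrigger (URL is an example, not from the patch).
    use LWP::Simple;
    getprint('http://example.com/ikiwiki.cgi?do=aggregate_webtrigger');
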
+
+sub launchaggregation () { #{{{
+       # See if any feeds need aggregation.
        loadstate();
-       if ($config{aggregate}) {
+       my @feeds=needsaggregate();
+       return unless @feeds;
+       if (! lockaggregate()) {
+               debug("an aggregation process is already running");
+               return;
+       }
+       # force a later rebuild of source pages
+       $IkiWiki::forcerebuild{$_->{sourcepage}}=1
+               foreach @feeds;
+
+       # Fork a child process to handle the aggregation.
+       # The parent process will then handle building the
+       # result. This avoids messy code to clear state
+       # accumulated while aggregating.
+       defined(my $pid = fork) or error("Can't fork: $!");
+       if (! $pid) {
                IkiWiki::loadindex();
-               aggregate();
+               # Aggregation happens without the main wiki lock
+               # being held. This allows editing pages etc while
+               # aggregation is running.
+               aggregate(@feeds);
+
+               IkiWiki::lockwiki;
+               # Merge changes, since aggregation state may have
+               # changed on disk while the aggregation was happening.
+               mergestate();
+               expire();
                savestate();
+               IkiWiki::unlockwiki;
+               exit 0;
+       }
+       waitpid($pid,0);
+       if ($?) {
+               error "aggregation failed with code $?";
        }
-       IkiWiki::unlockwiki();
+
+       clearstate();
+       unlockaggregate();
+
+       return 1;
 } #}}}
 
-sub filter (@) { #{{{
+# Pages with the _aggregated extension contain plain HTML markup; pass them through unchanged.
+sub htmlize (@) { #{{{
        my %params=@_;
-       my $page=$params{page};
+       return $params{content};
+} #}}}
 
-       # Mark all feeds originating on this page as removable;
-       # preprocess will unmark those that still exist.
-       remove_feeds($page);
+# Used by ikiwiki-transition aggregateinternal.
+sub migrate_to_internal { #{{{
+       if (! lockaggregate()) {
+               error("an aggregation process is currently running");
+       }
 
-       return $params{content};
+       IkiWiki::lockwiki();
+       loadstate();
+       $config{verbose}=1;
+
+       foreach my $data (values %guids) {
+               next unless $data->{page};
+               next if $data->{expired};
+               
+               $config{aggregateinternal} = 0;
+               my $oldname = "$config{srcdir}/".htmlfn($data->{page});
+               my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
+               
+               $config{aggregateinternal} = 1;
+               my $newname = "$config{srcdir}/".htmlfn($data->{page});
+               
+               debug "moving $oldname -> $newname";
+               if (-e $newname) {
+                       if (-e $oldname) {
+                               error("$newname already exists");
+                       }
+                       else {
+                               debug("already renamed to $newname?");
+                       }
+               }
+               elsif (-e $oldname) {
+                       rename($oldname, $newname) || error("$!");
+               }
+               else {
+                       debug("$oldname not found");
+               }
+               if (-e $oldoutput) {
+                       require IkiWiki::Render;
+                       debug("removing output file $oldoutput");
+                       IkiWiki::prune($oldoutput);
+               }
+       }
+       
+       savestate();
+       IkiWiki::unlockwiki;
+       
+       unlockaggregate();
+} #}}}
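
This renames each aggregated page's source file from page.html to page._aggregated and prunes the stale rendered output. It is not called during normal operation; assuming the standard transition helper, the migration would be run roughly as:

    # after setting aggregateinternal => 1 in the setup file (file name illustrative)
    ikiwiki-transition aggregateinternal your.setup
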
+
+sub needsbuild (@) { #{{{
+       my $needsbuild=shift;
+       
+       loadstate();
+
+       foreach my $feed (values %feeds) {
+               if (exists $pagesources{$feed->{sourcepage}} && 
+                   grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+                       # Mark all feeds originating on this page as 
+                       # not yet seen; preprocess will unmark those that
+                       # still exist.
+                       markunseen($feed->{sourcepage});
+               }
+       }
 } # }}}
 
 sub preprocess (@) { #{{{
@@ -55,7 +213,7 @@ sub preprocess (@) { #{{{
 
        foreach my $required (qw{name url}) {
                if (! exists $params{$required}) {
-                       return "[[aggregate plugin missing $required parameter]]";
+                       error sprintf(gettext("missing %s parameter"), $required)
                }
        }
 
@@ -70,7 +228,7 @@ sub preprocess (@) { #{{{
        $feed->{name}=$name;
        $feed->{sourcepage}=$params{page};
        $feed->{url}=$params{url};
-       my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
+       my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
        $dir=~s/^\/+//;
        ($dir)=$dir=~/$config{wiki_file_regexp}/;
        $feed->{dir}=$dir;
@@ -78,11 +236,18 @@ sub preprocess (@) { #{{{
        $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
        $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
        $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
-       delete $feed->{remove};
+       if (exists $params{template}) {
+               $params{template}=~s/[^-_a-zA-Z0-9]+//g;
+       }
+       else {
+               $params{template} = "aggregatepost";
+       }
+       $feed->{template}=$params{template} . ".tmpl";
+       delete $feed->{unseen};
        $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
        $feed->{numposts}=0 unless defined $feed->{numposts};
        $feed->{newposts}=0 unless defined $feed->{newposts};
-       $feed->{message}="new feed" unless defined $feed->{message};
+       $feed->{message}=gettext("new feed") unless defined $feed->{message};
        $feed->{error}=0 unless defined $feed->{error};
        $feed->{tags}=[];
        while (@_) {
@@ -96,8 +261,9 @@ sub preprocess (@) { #{{{
        return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
               ($feed->{error} ? "<em>" : "").$feed->{message}.
               ($feed->{error} ? "</em>" : "").
-              " (".$feed->{numposts}." posts".
-              ($feed->{newposts} ? "; ".$feed->{newposts}." new" : "").
+              " (".$feed->{numposts}." ".gettext("posts").
+              ($feed->{newposts} ? "; ".$feed->{newposts}.
+                                   " ".gettext("new") : "").
               ")";
 } # }}}
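
For context, a feed is registered by putting an aggregate directive on a wiki page; a sketch exercising the parameters handled above (names and values are examples):

    [[!aggregate name="Example Blog" url="http://example.com/blog/"
    feedurl="http://example.com/blog/index.rss" updateinterval=60
    expireage=30 template="aggregatepost" tag="imported"]]

Older wikis without prefixed directives would write [[aggregate ...]] instead.
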
 
@@ -107,14 +273,28 @@ sub delete (@) { #{{{
        # Remove feed data for removed pages.
        foreach my $file (@files) {
                my $page=pagename($file);
-               remove_feeds($page);
+               markunseen($page);
+       }
+} #}}}
+
+sub markunseen ($) { #{{{
+       my $page=shift;
+
+       foreach my $id (keys %feeds) {
+               if ($feeds{$id}->{sourcepage} eq $page) {
+                       $feeds{$id}->{unseen}=1;
+               }
        }
 } #}}}
 
+my $state_loaded=0;
+
 sub loadstate () { #{{{
+       return if $state_loaded;
+       $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
-               open (IN, "$config{wikistatedir}/aggregate" ||
-                       die "$config{wikistatedir}/aggregate: $!");
+               open(IN, "$config{wikistatedir}/aggregate") ||
+                       die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
                        chomp;
@@ -146,25 +326,12 @@ sub loadstate () { #{{{
 } #}}}
 
 sub savestate () { #{{{
-       eval q{use HTML::Entities};
-       die $@ if $@;
-       open (OUT, ">$config{wikistatedir}/aggregate" ||
-               die "$config{wikistatedir}/aggregate: $!");
+       return unless $state_loaded;
+       garbage_collect();
+       my $newfile="$config{wikistatedir}/aggregate.new";
+       my $cleanup = sub { unlink($newfile) };
+       open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
-               if ($data->{remove}) {
-                       if ($data->{name}) {
-                               foreach my $guid (values %guids) {
-                                       if ($guid->{feed} eq $data->{name}) {
-                                               $guid->{remove}=1;
-                                       }
-                               }
-                       }
-                       else {
-                               unlink pagefile($data->{page});
-                       }
-                       next;
-               }
-
                my @line;
                foreach my $field (keys %$data) {
                        if ($field eq "name" || $field eq "feed" ||
@@ -178,39 +345,169 @@ sub savestate () { #{{{
                                push @line, "$field=".$data->{$field};
                        }
                }
-               print OUT join(" ", @line)."\n";
+               print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
        }
-       close OUT;
+       close OUT || error("save $newfile: $!", $cleanup);
+       rename($newfile, "$config{wikistatedir}/aggregate") ||
+               error("rename $newfile: $!", $cleanup);
 } #}}}
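
savestate now writes the whole state to aggregate.new and renames it into place, so a crash mid-write can no longer truncate the state file. The same atomic-update idiom in isolation (a generic sketch; the variable names are illustrative):

    my $new = "$statefile.new";
    open(my $out, '>', $new) or die "open $new: $!";
    print $out $state or die "write $new: $!";
    close($out) or die "close $new: $!";
    rename($new, $statefile) or die "rename $new: $!";  # atomic on POSIX filesystems
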
 
-sub aggregate () { #{{{
-       eval q{use XML::Feed};
-       die $@ if $@;
-       eval q{use HTML::Entities};
-       die $@ if $@;
+sub garbage_collect () { #{{{
+       foreach my $name (keys %feeds) {
+               # remove any feeds that were not seen while building the pages
+               # that used to contain them
+               if ($feeds{$name}->{unseen}) {
+                       delete $feeds{$name};
+               }
+       }
 
+       foreach my $guid (values %guids) {
+               # any guid whose feed is gone should be removed
+               if (! exists $feeds{$guid->{feed}}) {
+                       unlink "$config{srcdir}/".htmlfn($guid->{page})
+                               if exists $guid->{page};
+                       delete $guids{$guid->{guid}};
+               }
+               # handle expired guids
+               elsif ($guid->{expired} && exists $guid->{page}) {
+                       unlink "$config{srcdir}/".htmlfn($guid->{page});
+                       delete $guid->{page};
+                       delete $guid->{md5};
+               }
+       }
+} #}}}
+
+sub mergestate () { #{{{
+       # Load the current state in from disk, and merge into it
+       # values from the state in memory that might have changed
+       # during aggregation.
+       my %myfeeds=%feeds;
+       my %myguids=%guids;
+       clearstate();
+       loadstate();
+
+       # All that can change in feed state during aggregation is a few
+       # fields.
+       foreach my $name (keys %myfeeds) {
+               if (exists $feeds{$name}) {
+                       foreach my $field (qw{message lastupdate numposts
+                                             newposts error}) {
+                               $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
+                       }
+               }
+       }
+
+       # New guids can be created during aggregation.
+       # It's also possible that guids were removed from the on-disk state
+       # while the aggregation was in process. That would only happen if
+       # their feed was also removed, so any removed guids added back here
+       # will be garbage collected later.
+       foreach my $guid (keys %myguids) {
+               if (! exists $guids{$guid}) {
+                       $guids{$guid}=$myguids{$guid};
+               }
+       }
+} #}}}
+
+sub clearstate () { #{{{
+       %feeds=();
+       %guids=();
+       $state_loaded=0;
+} #}}}
+
+sub expire () { #{{{
        foreach my $feed (values %feeds) {
-               next unless $config{rebuild} || 
-                       time - $feed->{lastupdate} >= $feed->{updateinterval};
+               next unless $feed->{expireage} || $feed->{expirecount};
+               my $count=0;
+               my %seen;
+               foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
+                                 grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
+                                 values %guids) {
+                       if ($feed->{expireage}) {
+                               my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
+                               if ($days_old > $feed->{expireage}) {
+                                       debug(sprintf(gettext("expiring %s (%s days old)"),
+                                               $item->{page}, int($days_old)));
+                                       $item->{expired}=1;
+                               }
+                       }
+                       elsif ($feed->{expirecount} &&
+                              $count >= $feed->{expirecount}) {
+                               debug(sprintf(gettext("expiring %s"), $item->{page}));
+                               $item->{expired}=1;
+                       }
+                       else {
+                               if (! $seen{$item->{page}}) {
+                                       $seen{$item->{page}}=1;
+                                       $count++;
+                               }
+                       }
+               }
+       }
+} #}}}
+
+sub needsaggregate () { #{{{
+       return values %feeds if $config{rebuild};
+       return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
+} #}}}
+
+sub aggregate (@) { #{{{
+       eval q{use XML::Feed};
+       error($@) if $@;
+       eval q{use URI::Fetch};
+       error($@) if $@;
+
+       foreach my $feed (@_) {
                $feed->{lastupdate}=time;
                $feed->{newposts}=0;
-               $IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
+               $feed->{message}=sprintf(gettext("processed ok at %s"),
+                       displaytime($feed->{lastupdate}));
+               $feed->{error}=0;
 
-               debug("checking feed ".$feed->{name}." ...");
+               debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
 
                if (! length $feed->{feedurl}) {
                        my @urls=XML::Feed->find_feeds($feed->{url});
                        if (! @urls) {
-                               $feed->{message}="could not find feed at ".$feed->{feedurl};
+                               $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
                                $feed->{error}=1;
                                debug($feed->{message});
                                next;
                        }
                        $feed->{feedurl}=pop @urls;
                }
-               my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
+               my $res=URI::Fetch->fetch($feed->{feedurl});
+               if (! $res) {
+                       $feed->{message}=URI::Fetch->errstr;
+                       $feed->{error}=1;
+                       debug($feed->{message});
+                       next;
+               }
+               if ($res->status == URI::Fetch::URI_GONE()) {
+                       $feed->{message}=gettext("feed not found");
+                       $feed->{error}=1;
+                       debug($feed->{message});
+                       next;
+               }
+               my $content=$res->content;
+               my $f=eval{XML::Feed->parse(\$content)};
+               if ($@) {
+                       # One common cause of XML::Feed crashing is a feed
+                       # that contains invalid UTF-8 sequences. Convert
+                       # feed to ascii to try to work around.
+                       $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
+                       $content=Encode::decode_utf8($content, 0);
+                       $f=eval{XML::Feed->parse(\$content)};
+               }
+               if ($@) {
+                       # Another possibility is badly escaped entities.
+                       $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
+                       $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
+                       $content=Encode::decode_utf8($content, 0);
+                       $f=eval{XML::Feed->parse(\$content)};
+               }
                if ($@) {
-                       $feed->{message}="feed crashed XML::Feed! $@";
+                       $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
@@ -223,22 +520,23 @@ sub aggregate () { #{{{
                }
 
                foreach my $entry ($f->entries) {
+                       my $content=$entry->content->body;
+                       # atom feeds may have no content, only a summary
+                       if (! defined $content && ref $entry->summary) {
+                               $content=$entry->summary->body;
+                       }
+
                        add_page(
                                feed => $feed,
+                               copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                                link => $entry->link,
-                               content => $entry->content->body,
-                               guid => defined $entry->id ? $entry->id : time."_".$feed->name,
+                               content => defined $content ? $content : "",
+                               guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                                ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
                        );
                }
-
-               $feed->{message}="processed ok at ".
-                       displaytime($feed->{lastupdate});
-               $feed->{error}=0;
        }
-
-       # TODO: expiry
 } #}}}
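
The second parse fallback rewrites every entity except &amp; so that undefined or badly escaped entities no longer crash the XML parser. Its effect in isolation (sample string invented for illustration):

    my $content = 'fish &amp; chips at the caf&eacute;';
    $content =~ s/\&(?!amp)(\w+);/&amp;$1;/g;
    # $content is now 'fish &amp; chips at the caf&amp;eacute;'
    # (&amp; is left alone; the undefined &eacute; entity is neutralized)
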
 
 sub add_page (@) { #{{{
@@ -250,6 +548,7 @@ sub add_page (@) { #{{{
        if (exists $guids{$params{guid}}) {
                # updating an existing post
                $guid=$guids{$params{guid}};
+               return if $guid->{expired};
        }
        else {
                # new post
@@ -260,7 +559,7 @@ sub add_page (@) { #{{{
                $feed->{newposts}++;
 
                # assign it an unused page
-               my $page=IkiWiki::titlepage($params{title});
+               my $page=titlepage($params{title});
                # escape slashes and periods in title so it doesn't specify
                # directory name or trigger ".." disallowing code.
                $page=~s!([/.])!"__".ord($1)."__"!eg;
@@ -271,11 +570,24 @@ sub add_page (@) { #{{{
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
-                      -e pagefile($page.$c)) {
+                      -e "$config{srcdir}/".htmlfn($page.$c)) {
                        $c++
                }
+
+               # Make sure that the file name isn't too long. 
+               # NB: This doesn't check for path length limits.
+               my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+               if (defined $max && length(htmlfn($page)) >= $max) {
+                       $c="";
+                       $page=$feed->{dir}."/item";
+                       while (exists $IkiWiki::pagecase{lc $page.$c} ||
+                              -e "$config{srcdir}/".htmlfn($page.$c)) {
+                               $c++
+                       }
+               }
+
                $guid->{page}=$page;
-               debug("creating new page $page");
+               debug(sprintf(gettext("creating new page %s"), $page));
        }
        $guid->{feed}=$feed->{name};
        
@@ -283,29 +595,40 @@ sub add_page (@) { #{{{
        # to avoid unnecessary rebuilding. The mtime from rss cannot be
        # trusted; let's use a digest.
        eval q{use Digest::MD5 'md5_hex'};
+       error($@) if $@;
        require Encode;
        my $digest=md5_hex(Encode::encode_utf8($params{content}));
        return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
        $guid->{md5}=$digest;
 
        # Create the page.
-       my $template=template("aggregatepost.tmpl", blind_cache => 1);
+       my $template=template($feed->{template}, blind_cache => 1);
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
        $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
        $template->param(name => $feed->{name});
        $template->param(url => $feed->{url});
+       $template->param(copyright => $params{copyright})
+               if defined $params{copyright} && length $params{copyright};
        $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
-       writefile(htmlpage($guid->{page}), $config{srcdir},
+       writefile(htmlfn($guid->{page}), $config{srcdir},
                $template->output);
 
-       # Set the mtime, this lets the build process get the right creation
-       # time on record for the new page.
-       utime $mtime, $mtime, pagefile($guid->{page}) if defined $mtime;
+       if (defined $mtime && $mtime <= time) {
+               # Set the mtime, this lets the build process get the right
+               # creation time on record for the new page.
+               utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
+               # Store it in pagectime for expiry code to use also.
+               $IkiWiki::pagectime{$guid->{page}}=$mtime;
+       }
+       else {
+               # Dummy value for expiry code.
+               $IkiWiki::pagectime{$guid->{page}}=time;
+       }
 } #}}}
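
The template receives the params set above: title, content, name, url, and now copyright, plus permalink and a tags loop. A minimal HTML::Template-style sketch compatible with those params (not the aggregatepost.tmpl shipped with ikiwiki):

    <TMPL_IF PERMALINK><a href="<TMPL_VAR PERMALINK>"><TMPL_VAR TITLE></a><TMPL_ELSE><TMPL_VAR TITLE></TMPL_IF>
    <TMPL_VAR CONTENT>
    from <a href="<TMPL_VAR URL>"><TMPL_VAR NAME></a>
    <TMPL_IF COPYRIGHT>(<TMPL_VAR COPYRIGHT>)</TMPL_IF>
    <TMPL_LOOP TAGS>tag: <TMPL_VAR TAG> </TMPL_LOOP>
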
 
 sub htmlescape ($) { #{{{
@@ -360,22 +683,30 @@ sub htmlabs ($$) { #{{{
        return $ret;
 } #}}}
 
-sub remove_feeds () { #{{{
-       my $page=shift;
+sub htmlfn ($) { #{{{
+       return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
+} #}}}
 
-       my %removed;
-       foreach my $id (keys %feeds) {
-               if ($feeds{$id}->{sourcepage} eq $page) {
-                       $feeds{$id}->{remove}=1;
-                       $removed{$id}=1;
-               }
+my $aggregatelock;
+
+sub lockaggregate () { #{{{
+       # Take an exclusive lock to prevent multiple concurrent aggregators.
+       # Returns true if the lock was acquired.
+       if (! -d $config{wikistatedir}) {
+               mkdir($config{wikistatedir});
        }
+       open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
+               error ("cannot open to $config{wikistatedir}/aggregatelock: $!");
+       if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
+               close($aggregatelock) || error("failed closing aggregatelock: $!");
+               return 0;
+       }
+       return 1;
 } #}}}
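
The flock flags are spelled numerically above; with Fcntl's symbolic constants the same call would read (equivalent sketch, not part of this patch):

    use Fcntl qw(:flock);
    if (! flock($aggregatelock, LOCK_EX | LOCK_NB)) {  # same as 2 | 4
            close($aggregatelock);
            return 0;
    }
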
 
-sub pagefile ($) { #{{{
-       my $page=shift;
-
-       return "$config{srcdir}/".htmlpage($page);
+sub unlockaggregate () { #{{{
+       return close($aggregatelock) if $aggregatelock;
+       return;
 } #}}}
 
 1