#!/usr/bin/perl
# Feed aggregation plugin.
package IkiWiki::Plugin::aggregate;

use warnings;
use strict;
use IkiWiki 2.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
use URI;
use open qw{:utf8 :std};
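
# Plugin state: %feeds is keyed by feed name, %guids by the guid of each
# aggregated post. Both are loaded from and saved to the state file.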
my %feeds;
my %guids;

sub import {
	hook(type => "getopt", id => "aggregate", call => \&getopt);
	hook(type => "getsetup", id => "aggregate", call => \&getsetup);
	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
	hook(type => "delete", id => "aggregate", call => \&delete);
	hook(type => "savestate", id => "aggregate", call => \&savestate);
	hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
	if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
		hook(type => "cgi", id => "aggregate", call => \&cgi);
	}
}

sub getopt () {
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');
	GetOptions(
		"aggregate" => \$config{aggregate},
		"aggregateinternal!" => \$config{aggregateinternal},
	);
}

sub getsetup () {
	return
		plugin => {
			safe => 1,
			rebuild => undef,
		},
		aggregateinternal => {
			type => "boolean",
			example => 0,
			description => "enable aggregation to internal pages?",
			safe => 0, # enabling needs manual transition
			rebuild => 0,
		},
		aggregate_webtrigger => {
			type => "boolean",
			example => 0,
			description => "allow aggregation to be triggered via the web?",
			safe => 1,
			rebuild => 0,
		},
}

sub checkconfig () {
	if ($config{aggregate} && ! ($config{post_commit} &&
	                             IkiWiki::commit_hook_enabled())) {
		launchaggregation();
	}
}
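
# When aggregate_webtrigger is enabled, aggregation can be triggered over
# the web by fetching the CGI with do=aggregate_webtrigger, e.g.
# (illustrative URL):
#   wget -q -O - 'http://example.com/ikiwiki.cgi?do=aggregate_webtrigger'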
sub cgi ($) {
	my $cgi=shift;

	if (defined $cgi->param('do') &&
	    $cgi->param("do") eq "aggregate_webtrigger") {
		$|=1;
		print "Content-Type: text/plain\n\n";
		$config{cgi}=0;
		$config{verbose}=1;
		$config{syslog}=0;
		print gettext("Aggregation triggered via web.")."\n\n";
		if (launchaggregation()) {
			IkiWiki::lockwiki();
			IkiWiki::loadindex();
			require IkiWiki::Render;
			IkiWiki::refresh();
			IkiWiki::saveindex();
		}
		else {
			print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
		}
		exit 0;
	}
}

sub launchaggregation () {
	# See if any feeds need aggregation.
	loadstate();
	my @feeds=needsaggregate();
	return unless @feeds;
	if (! lockaggregate()) {
		debug("an aggregation process is already running");
		return;
	}
	# force a later rebuild of source pages
	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
		foreach @feeds;

	# Fork a child process to handle the aggregation.
	# The parent process will then handle building the
	# result. This avoids messy code to clear state
	# accumulated while aggregating.
	defined(my $pid = fork) or error("Can't fork: $!");
	if (! $pid) {
		IkiWiki::loadindex();
		# Aggregation happens without the main wiki lock
		# being held. This allows editing pages etc while
		# aggregation is running.
		aggregate(@feeds);

		IkiWiki::lockwiki;
		# Merge changes, since aggregation state may have
		# changed on disk while the aggregation was happening.
		mergestate();
		expire();
		savestate();
		IkiWiki::unlockwiki;
		exit 0;
	}
	waitpid($pid,0);
	if ($?) {
		error "aggregation failed with code $?";
	}

	clearstate();
	unlockaggregate();

	return 1;
}

# Pages with extension _aggregated have plain html markup, pass through.
sub htmlize (@) {
	my %params=@_;
	return $params{content};
}

# Used by ikiwiki-transition aggregateinternal.
sub migrate_to_internal {
	if (! lockaggregate()) {
		error("an aggregation process is currently running");
	}

	IkiWiki::lockwiki();
	loadstate();
	$config{verbose}=1;

	foreach my $data (values %guids) {
		next unless $data->{page};
		next if $data->{expired};

		$config{aggregateinternal} = 0;
		my $oldname = "$config{srcdir}/".htmlfn($data->{page});
		my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});

		$config{aggregateinternal} = 1;
		my $newname = "$config{srcdir}/".htmlfn($data->{page});

		debug "moving $oldname -> $newname";
		if (-e $newname) {
			if (-e $oldname) {
				error("$newname already exists");
			}
			else {
				debug("already renamed to $newname?");
			}
		}
		elsif (-e $oldname) {
			rename($oldname, $newname) || error("$!");
		}
		else {
			debug("$oldname not found");
		}
		if (-e $oldoutput) {
			require IkiWiki::Render;
			debug("removing output file $oldoutput");
			IkiWiki::prune($oldoutput);
		}
	}

	savestate();
	IkiWiki::unlockwiki;

	unlockaggregate();
}

sub needsbuild (@) {
	my $needsbuild=shift;

	loadstate();

	foreach my $feed (values %feeds) {
		if (exists $pagesources{$feed->{sourcepage}} &&
		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
			# Mark all feeds originating on this page as
			# not yet seen; preprocess will unmark those that
			# still exist.
			markunseen($feed->{sourcepage});
		}
	}
}
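
# Handles the aggregate directive, e.g. (illustrative feed):
# [[!aggregate name="example blog" url="http://example.com/"
#   feedurl="http://example.com/index.rss" updateinterval="60"
#   expireage="30" tag="blogs"]]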
sub preprocess (@) {
	my %params=@_;

	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			error sprintf(gettext("missing %s parameter"), $required);
		}
	}

	my $feed={};
	my $name=$params{name};
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
	$feed->{name}=$name;
	$feed->{sourcepage}=$params{page};
	$feed->{url}=$params{url};
	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
	$dir=~s/^\/+//;
	($dir)=$dir=~/$config{wiki_file_regexp}/;
	$feed->{dir}=$dir;
	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
	if (exists $params{template}) {
		$params{template}=~s/[^-_a-zA-Z0-9]+//g;
	}
	else {
		$params{template} = "aggregatepost";
	}
	$feed->{template}=$params{template} . ".tmpl";
	delete $feed->{unseen};
	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
	$feed->{numposts}=0 unless defined $feed->{numposts};
	$feed->{newposts}=0 unless defined $feed->{newposts};
	$feed->{message}=gettext("new feed") unless defined $feed->{message};
	$feed->{error}=0 unless defined $feed->{error};
	$feed->{tags}=[];
	while (@_) {
		my $key=shift;
		my $value=shift;
		if ($key eq 'tag') {
			push @{$feed->{tags}}, $value;
		}
	}

	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
	       ($feed->{error} ? "<em>" : "").$feed->{message}.
	       ($feed->{error} ? "</em>" : "").
	       " (".$feed->{numposts}." ".gettext("posts").
	       ($feed->{newposts} ? "; ".$feed->{newposts}.
	                            " ".gettext("new") : "").
	       ")";
}

sub delete (@) {
	my @files=@_;

	# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
		markunseen($page);
	}
}

sub markunseen ($) {
	my $page=shift;

	foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{unseen}=1;
		}
	}
}

my $state_loaded=0;
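
# The state file stores one feed or guid record per line, as space-separated
# key=value pairs; whitespace in the name, feed, guid, and message values is
# entity-encoded. An illustrative guid line (field order is not significant):
#   guid=http://example.com/post feed=example page=feeds/example/post md5=...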
sub loadstate () {
	return if $state_loaded;
	$state_loaded=1;
	if (-e "$config{wikistatedir}/aggregate") {
		open(IN, "$config{wikistatedir}/aggregate") ||
			die "$config{wikistatedir}/aggregate: $!";
		while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my $data={};
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data->{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					push @{$data->{tags}}, $val;
				}
				else {
					$data->{$field}=$val;
				}
			}

			if (exists $data->{name}) {
				$feeds{$data->{name}}=$data;
			}
			elsif (exists $data->{guid}) {
				$guids{$data->{guid}}=$data;
			}
		}

		close IN;
	}
}

sub savestate () {
	return unless $state_loaded;
	garbage_collect();
	my $newfile="$config{wikistatedir}/aggregate.new";
	my $cleanup = sub { unlink($newfile) };
	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
	foreach my $data (values %feeds, values %guids) {
		my @line;
		foreach my $field (keys %$data) {
			if ($field eq "name" || $field eq "feed" ||
			    $field eq "guid" || $field eq "message") {
				push @line, "$field=".encode_entities($data->{$field}, " \t\n");
			}
			elsif ($field eq "tags") {
				push @line, "tag=$_" foreach @{$data->{tags}};
			}
			else {
				push @line, "$field=".$data->{$field};
			}
		}
		# Use low-precedence "or" so the error check tests print's
		# return value, not the truth of the string being printed.
		print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
	}
	close OUT or error("save $newfile: $!", $cleanup);
	rename($newfile, "$config{wikistatedir}/aggregate") ||
		error("rename $newfile: $!", $cleanup);
}

sub garbage_collect () {
	foreach my $name (keys %feeds) {
		# remove any feeds that were not seen while building the pages
		# that used to contain them
		if ($feeds{$name}->{unseen}) {
			delete $feeds{$name};
		}
	}

	foreach my $guid (values %guids) {
		# any guid whose feed is gone should be removed
		if (! exists $feeds{$guid->{feed}}) {
			unlink "$config{srcdir}/".htmlfn($guid->{page})
				if exists $guid->{page};
			delete $guids{$guid->{guid}};
		}
		# handle expired guids
		elsif ($guid->{expired} && exists $guid->{page}) {
			unlink "$config{srcdir}/".htmlfn($guid->{page});
			delete $guid->{page};
			delete $guid->{md5};
		}
	}
}

sub mergestate () {
	# Load the current state in from disk, and merge into it
	# values from the state in memory that might have changed
	# during aggregation.
	my %myfeeds=%feeds;
	my %myguids=%guids;
	clearstate();
	loadstate();

	# All that can change in feed state during aggregation is a few
	# fields.
	foreach my $name (keys %myfeeds) {
		if (exists $feeds{$name}) {
			foreach my $field (qw{message lastupdate numposts
			                      newposts error}) {
				$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
			}
		}
	}

	# New guids can be created during aggregation.
	# It's also possible that guids were removed from the on-disk state
	# while the aggregation was in process. That would only happen if
	# their feed was also removed, so any removed guids added back here
	# will be garbage collected later.
	foreach my $guid (keys %myguids) {
		if (! exists $guids{$guid}) {
			$guids{$guid}=$myguids{$guid};
		}
	}
}

sub clearstate () {
	%feeds=();
	%guids=();
	$state_loaded=0;
}

sub expire () {
	foreach my $feed (values %feeds) {
		next unless $feed->{expireage} || $feed->{expirecount};
		my $count=0;
		my %seen;
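		# Iterate from newest to oldest, so expirecount keeps
		# the most recent posts.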
		foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
		                  values %guids) {
			if ($feed->{expireage}) {
				my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
				if ($days_old > $feed->{expireage}) {
					debug(sprintf(gettext("expiring %s (%s days old)"),
						$item->{page}, int($days_old)));
					$item->{expired}=1;
				}
			}
			elsif ($feed->{expirecount} &&
			       $count >= $feed->{expirecount}) {
				debug(sprintf(gettext("expiring %s"), $item->{page}));
				$item->{expired}=1;
			}
			else {
				if (! $seen{$item->{page}}) {
					$seen{$item->{page}}=1;
					$count++;
				}
			}
		}
	}
}

sub needsaggregate () {
	return values %feeds if $config{rebuild};
	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
}

sub aggregate (@) {
	eval q{use XML::Feed};
	error($@) if $@;
	eval q{use URI::Fetch};
	error($@) if $@;

	foreach my $feed (@_) {
		$feed->{lastupdate}=time;
		$feed->{newposts}=0;
		$feed->{message}=sprintf(gettext("last checked %s"),
			displaytime($feed->{lastupdate}));
		$feed->{error}=0;

		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
			$feed->{feedurl}=pop @urls;
		}
		my $res=URI::Fetch->fetch($feed->{feedurl});
		if (! $res) {
			$feed->{message}=URI::Fetch->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if ($res->status == URI::Fetch::URI_GONE()) {
			$feed->{message}=gettext("feed not found");
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		my $content=$res->content;
		my $f=eval{XML::Feed->parse(\$content)};
		if ($@) {
			# One common cause of XML::Feed crashing is a feed
			# that contains invalid UTF-8 sequences. Decode the
			# content, replacing invalid sequences, and re-parse
			# to try to work around.
			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
			$f=eval {
				$content=Encode::decode_utf8($content, 0);
				XML::Feed->parse(\$content)
			};
		}
		if ($@) {
			# Another possibility is badly escaped entities.
			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
			$f=eval {
				$content=Encode::decode_utf8($content, 0);
				XML::Feed->parse(\$content)
			};
		}
		if ($@) {
			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}
		if (! $f) {
			$feed->{message}=XML::Feed->errstr;
			$feed->{error}=1;
			debug($feed->{message});
			next;
		}

		foreach my $entry ($f->entries) {
			my $c=$entry->content;
			# atom feeds may have no content, only a summary
			if (! defined $c && ref $entry->summary) {
				$c=$entry->summary;
			}

			add_page(
				feed => $feed,
				copyright => $f->copyright,
				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
				link => $entry->link,
				content => (defined $c && defined $c->body) ? $c->body : "",
				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
				base => (defined $c && $c->can("base")) ? $c->base : undef,
			);
		}
	}
}

sub add_page (@) {
	my %params=@_;

	my $feed=$params{feed};
	my $guid={};
	my $mtime;
	if (exists $guids{$params{guid}}) {
		# updating an existing post
		$guid=$guids{$params{guid}};
		return if $guid->{expired};
	}
	else {
		# new post
		$guid->{guid}=$params{guid};
		$guids{$params{guid}}=$guid;
		$mtime=$params{ctime};
		$feed->{numposts}++;
		$feed->{newposts}++;

		# assign it an unused page
		my $page=titlepage($params{title});
		# escape slashes and periods in title so it doesn't specify
		# directory name or trigger ".." disallowing code.
		$page=~s!([/.])!"__".ord($1)."__"!eg;
		$page=$feed->{dir}."/".$page;
		($page)=$page=~/$config{wiki_file_regexp}/;
		if (! defined $page || ! length $page) {
			$page=$feed->{dir}."/item";
		}
		my $c="";
		while (exists $IkiWiki::pagecase{lc $page.$c} ||
		       -e "$config{srcdir}/".htmlfn($page.$c)) {
			$c++;
		}

		# Make sure that the file name isn't too long.
		# NB: This doesn't check for path length limits.
		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
		if (defined $max && length(htmlfn($page)) >= $max) {
			$c="";
			$page=$feed->{dir}."/item";
			while (exists $IkiWiki::pagecase{lc $page.$c} ||
			       -e "$config{srcdir}/".htmlfn($page.$c)) {
				$c++;
			}
		}

		$guid->{page}=$page;
		debug(sprintf(gettext("creating new page %s"), $page));
	}
	$guid->{feed}=$feed->{name};

	# To write or not to write? Need to avoid writing unchanged pages
	# to avoid unnecessary rebuilding. The mtime from rss cannot be
	# trusted; let's use a digest.
	eval q{use Digest::MD5 'md5_hex'};
	error($@) if $@;
	require Encode;
	my $digest=md5_hex(Encode::encode_utf8($params{content}));
	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
	$guid->{md5}=$digest;

	# Create the page.
	my $template=template($feed->{template}, blind_cache => 1);
	$template->param(title => $params{title})
		if defined $params{title} && length($params{title});
	$template->param(content => wikiescape(htmlabs($params{content},
		defined $params{base} ? $params{base} : $feed->{feedurl})));
	$template->param(name => $feed->{name});
	$template->param(url => $feed->{url});
	$template->param(copyright => $params{copyright})
		if defined $params{copyright} && length $params{copyright};
	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
		if defined $params{link};
	if (ref $feed->{tags}) {
		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
	}
	writefile(htmlfn($guid->{page}), $config{srcdir},
		$template->output);

	if (defined $mtime && $mtime <= time) {
		# Set the mtime, this lets the build process get the right
		# creation time on record for the new page.
		utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
		# Store it in pagectime for expiry code to use also.
		$IkiWiki::pagectime{$guid->{page}}=$mtime;
	}
	else {
		# Dummy value for expiry code.
		$IkiWiki::pagectime{$guid->{page}}=time;
	}
}

sub wikiescape ($) {
	# escape accidental wikilinks and preprocessor stuff
	return encode_entities(shift, '\[\]');
}

sub urlabs ($$) {
	my $url=shift;
	my $urlbase=shift;

	URI->new_abs($url, $urlbase)->as_string;
}
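
# For example (illustrative): with base http://example.com/blog/index.rss,
# a relative href="/post" below is rewritten to http://example.com/post.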
sub htmlabs ($$) {
	# Convert links in html from relative to absolute.
	# Note that this is a heuristic, which is not specified by the rss
	# spec and may not be right for all feeds. Also, see Debian
	# bug #381359.
	my $html=shift;
	my $urlbase=shift;

	my $ret="";
	my $p = HTML::Parser->new(api_version => 3);
	$p->handler(default => sub { $ret.=join("", @_) }, "text");
	$p->handler(start => sub {
		my ($tagname, $pos, $text) = @_;
		if (ref $HTML::Tagset::linkElements{$tagname}) {
			while (4 <= @$pos) {
				# use attribute sets from right to left
				# to avoid invalidating the offsets
				# when replacing the values
				my($k_offset, $k_len, $v_offset, $v_len) =
					splice(@$pos, -4);
				my $attrname = lc(substr($text, $k_offset, $k_len));
				next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
				next unless $v_offset; # 0 v_offset means no value
				my $v = substr($text, $v_offset, $v_len);
				$v =~ s/^([\'\"])(.*)\1$/$2/;
				my $new_v=urlabs($v, $urlbase);
				$new_v =~ s/\"/&quot;/g; # since we quote with ""
				substr($text, $v_offset, $v_len) = qq("$new_v");
			}
		}
		$ret.=$text;
	}, "tagname, tokenpos, text");
	$p->parse($html);
	$p->eof;

	return $ret;
}
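
# Map a page name to its source file name, e.g. "foo" becomes
# "foo._aggregated" when aggregating to internal pages, else
# "foo.html" (or whatever $config{htmlext} is set to).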
sub htmlfn ($) {
	return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
}

my $aggregatelock;

sub lockaggregate () {
	# Take an exclusive lock to prevent multiple concurrent aggregators.
	# Returns true if the lock was acquired.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
		error("cannot open $config{wikistatedir}/aggregatelock: $!");
	if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
		close($aggregatelock) || error("failed closing aggregatelock: $!");
		return 0;
	}
	return 1;
}

sub unlockaggregate () {
	return close($aggregatelock) if $aggregatelock;
	return;
}

1