sub import { #{{{
hook(type => "getopt", id => "aggregate", call => \&getopt);
hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
- hook(type => "filter", id => "aggregate", call => \&filter);
+ hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
hook(type => "preprocess", id => "aggregate", call => \&preprocess);
hook(type => "delete", id => "aggregate", call => \&delete);
hook(type => "savestate", id => "aggregate", call => \&savestate);
debug("wiki is locked by another process, not aggregating");
exit 1;
}
-
- loadstate();
- IkiWiki::loadindex();
- aggregate();
- expire();
- savestate();
- clearstate();
-
+
+ # Fork a child process to handle the aggregation.
+ # The parent process will then handle building the result.
+ # This avoids messy code to clear state accumulated while
+ # aggregating.
+ defined(my $pid = fork) or error("Can't fork: $!");
+ if (! $pid) {
+ loadstate();
+ IkiWiki::loadindex();
+ aggregate();
+ expire();
+ savestate();
+ exit 0;
+ }
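+ # Wait for the aggregation child and fail if it did not exit cleanly.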
+ waitpid($pid,0);
+ if ($?) {
+ error "aggregation failed with code $?";
+ }
+
IkiWiki::unlockwiki();
}
} #}}}
-sub filter (@) { #{{{
- my %params=@_;
- my $page=$params{page};
-
+sub needsbuild (@) { #{{{
+ my $needsbuild=shift;
+
loadstate(); # if not already loaded
- # Mark all feeds originating on this page as removable;
- # preprocess will unmark those that still exist.
- remove_feeds($page);
- return $params{content};
+ foreach my $feed (values %feeds) {
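+ # @$needsbuild lists changed source files; check whether this feed's source page is among them.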
+ if (grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+ # Mark all feeds originating on this page as removable;
+ # preprocess will unmark those that still exist.
+ remove_feeds($feed->{sourcepage});
+ }
+ }
} #}}}
sub preprocess (@) { #{{{
my $state_loaded=0;
sub loadstate () { #{{{
return if $state_loaded;
+ $state_loaded=1;
if (-e "$config{wikistatedir}/aggregate") {
open(IN, "$config{wikistatedir}/aggregate") ||
die "$config{wikistatedir}/aggregate: $!";
}
close IN;
-
- $state_loaded=1;
}
} #}}}
sub savestate () { #{{{
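+ # Avoid writing anything if loadstate() was never called; that would replace the existing state file with an empty one.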
+ return unless $state_loaded;
eval q{use HTML::Entities};
error($@) if $@;
my $newfile="$config{wikistatedir}/aggregate.new";
- # TODO: This cleanup function could use improvement. Any newly
- # aggregated files are left behind unrecorded, and should be deleted.
my $cleanup = sub { unlink($newfile) };
open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
foreach my $data (values %feeds, values %guids) {
}
}
else {
- unlink pagefile($data->{page});
+ unlink pagefile($data->{page})
+ if exists $data->{page};
}
next;
}
error("rename $newfile: $!", $cleanup);
} #}}}
-sub clearstate () { #{{{
- %feeds=();
- %guids=();
- $state_loaded=0;
-} #}}}
-
sub expire () { #{{{
foreach my $feed (values %feeds) {
next unless $feed->{expireage} || $feed->{expirecount};
my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
if ($days_old > $feed->{expireage}) {
debug(sprintf(gettext("expiring %s (%s days old)"),
- $item->{page}, $days_old));
+ $item->{page}, int($days_old)));
$item->{expired}=1;
}
}
$content=Encode::decode_utf8($content);
$f=eval{XML::Feed->parse(\$content)};
}
+ if ($@) {
+ # Another possibility is badly escaped entities.
+ $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
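+ # Escape the & of any entity other than &amp; itself, then retry the parse.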
+ $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
+ $content=Encode::decode_utf8($content);
+ $f=eval{XML::Feed->parse(\$content)};
+ }
if ($@) {
$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
$feed->{error}=1;
foreach my $entry ($f->entries) {
add_page(
feed => $feed,
+ copyright => $f->copyright,
title => defined $entry->title ? decode_entities($entry->title) : "untitled",
link => $entry->link,
content => defined $entry->content->body ? $entry->content->body : "",
$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
$template->param(name => $feed->{name});
$template->param(url => $feed->{url});
+ $template->param(copyright => $params{copyright})
+ if defined $params{copyright} && length $params{copyright};
$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
if defined $params{link};
if (ref $feed->{tags}) {
# Set the mtime; this lets the build process get the right creation
# time on record for the new page.
- utime $mtime, $mtime, pagefile($guid->{page}) if defined $mtime;
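+ # (Only trust timestamps that are not in the future.)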
+ utime $mtime, $mtime, pagefile($guid->{page})
+ if defined $mtime && $mtime <= time;
} #}}}
sub htmlescape ($) { #{{{