foreach my $required (qw{name url}) {
if (! exists $params{$required}) {
- return "[[".sprintf(gettext("aggregate plugin missing %s parameter"), $required)."]]";
+ return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
}
}
sub savestate () { #{{{
eval q{use HTML::Entities};
error($@) if $@;
- open (OUT, ">$config{wikistatedir}/aggregate" ||
- die "$config{wikistatedir}/aggregate: $!");
+ my $newfile="$config{wikistatedir}/aggregate.new";
+ # TODO: This cleanup function could use improvement. Any newly
+ # aggregated files are left behind unrecorded, and should be deleted.
+ my $cleanup = sub { unlink($newfile) };
+ open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
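+ # All state is written to the temporary file; each error handler below is
+ # passed $cleanup so a partial .new file is removed if the save fails.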
foreach my $data (values %feeds, values %guids) {
if ($data->{remove}) {
if ($data->{name}) {
push @line, "$field=".$data->{$field};
}
}
- print OUT join(" ", @line)."\n";
+ print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
}
- close OUT;
+ close OUT || error("save $newfile: $!", $cleanup);
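+ # Only after the new file is written and closed successfully does it replace
+ # the old state file; rename() within the same directory is atomic.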
+ rename($newfile, "$config{wikistatedir}/aggregate") ||
+ error("rename $newfile: $!", $cleanup);
} #}}}
sub expire () { #{{{
);
}
- $feed->{message}=sprintf(gettext("processed ok at "),
+ $feed->{message}=sprintf(gettext("processed ok at %s"),
displaytime($feed->{lastupdate}));
$feed->{error}=0;
}
my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
if (defined $max && length(htmlpage($page)) >= $max) {
$c="";
- $page="item";
+ $page=$feed->{dir}."/item";
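+ # Use the feed's directory with a short "item" base name so the generated
+ # file name stays under NAME_MAX; the loop below appends a number until
+ # the page name is unused.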
while (exists $IkiWiki::pagecase{lc $page.$c} ||
-e pagefile($page.$c)) {
$c++