X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/5608aa078e5a11ff6fbe1adda65a209017b0b630..88576fd762962c6f287850c9a4553547e2938974:/IkiWiki/Plugin/aggregate.pm?ds=sidebyside

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index d11283f0a..ae86d7979 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -31,15 +31,16 @@ sub getopt () { #{{{
 } #}}}
 
 sub checkconfig () { #{{{
-	IkiWiki::lockwiki();
+	my $nolock=($config{post_commit} && ! IkiWiki::commit_hook_enabled());
+	IkiWiki::lockwiki() unless $nolock;
 	loadstate();
-	if ($config{aggregate}) {
+	if ($config{aggregate} && ! $nolock) {
 		IkiWiki::loadindex();
 		aggregate();
 		expire();
 		savestate();
 	}
-	IkiWiki::unlockwiki();
+	IkiWiki::unlockwiki() unless $nolock;
 } #}}}
 
 sub filter (@) { #{{{
@@ -58,7 +59,7 @@ sub preprocess (@) { #{{{
 
 	foreach my $required (qw{name url}) {
 		if (! exists $params{$required}) {
-			return "[[".sprintf(gettext("aggregate plugin missing %s parameter"), $required)."]]";
+			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
 		}
 	}
 
@@ -153,8 +154,11 @@ sub loadstate () { #{{{
 sub savestate () { #{{{
 	eval q{use HTML::Entities};
 	error($@) if $@;
-	open (OUT, ">$config{wikistatedir}/aggregate" ||
-		die "$config{wikistatedir}/aggregate: $!");
+	my $newfile="$config{wikistatedir}/aggregate.new";
+	# TODO: This cleanup function could use improvement. Any newly
+	# aggregated files are left behind unrecorded, and should be deleted.
+	my $cleanup = sub { unlink($newfile) };
+	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
 		if ($data->{remove}) {
 			if ($data->{name}) {
@@ -188,9 +192,11 @@ sub savestate () { #{{{
 				push @line, "$field=".$data->{$field};
 			}
 		}
-		print OUT join(" ", @line)."\n";
+		print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
 	}
-	close OUT;
+	close OUT || error("save $newfile: $!", $cleanup);
+	rename($newfile, "$config{wikistatedir}/aggregate") ||
+		error("rename $newfile: $!", $cleanup);
 } #}}}
 
 sub expire () { #{{{
@@ -238,7 +244,7 @@ sub aggregate () { #{{{
 		if (! length $feed->{feedurl}) {
 			my @urls=XML::Feed->find_feeds($feed->{url});
 			if (! @urls) {
-				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{feedurl});
+				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
 				$feed->{error}=1;
 				debug($feed->{message});
 				next;
@@ -270,7 +276,7 @@ sub aggregate () { #{{{
 			);
 		}
 
-		$feed->{message}=sprintf(gettext("processed ok at "),
+		$feed->{message}=sprintf(gettext("processed ok at %s"),
 			displaytime($feed->{lastupdate}));
 		$feed->{error}=0;
 	}
@@ -315,9 +321,9 @@ sub add_page (@) { #{{{
 		# NB: This doesn't check for path length limits.
 		eval q{use POSIX};
 		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-		if (defined $max && length(htmlpage($page)) >= $max) {
+		if (defined $max && length(htmlfn($page)) >= $max) {
 			$c="";
-			$page="item";
+			$page=$feed->{dir}."/item";
 			while (exists $IkiWiki::pagecase{lc $page.$c} ||
 			       -e pagefile($page.$c)) {
 				$c++
@@ -351,7 +357,7 @@ sub add_page (@) { #{{{
 	if (ref $feed->{tags}) {
 		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
 	}
-	writefile(htmlpage($guid->{page}), $config{srcdir},
+	writefile(htmlfn($guid->{page}), $config{srcdir},
 		$template->output);
 
 	# Set the mtime, this lets the build process get the right creation
@@ -426,7 +432,11 @@ sub remove_feeds () { #{{{
 sub pagefile ($) { #{{{
 	my $page=shift;
 
-	return "$config{srcdir}/".htmlpage($page);
+	return "$config{srcdir}/".htmlfn($page);
+} #}}}
+
+sub htmlfn ($) { #{{{
+	return shift().".html";
 } #}}}
 
 1