X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/b78e93098b29a9fbefde840e2cddc34c5e3f9745..b9dc3e84a512eecf66b2b3e73be39530b588f49c:/IkiWiki/Plugin/aggregate.pm?ds=inline

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 4f4182930..78f8b409c 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -4,11 +4,12 @@ package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
-use IkiWiki;
+use IkiWiki 2.00;
 use HTML::Entities;
 use HTML::Parser;
 use HTML::Tagset;
 use URI;
+use open qw{:utf8 :std};
 
 my %feeds;
 my %guids;
@@ -24,20 +25,22 @@ sub import { #{{{
 
 sub getopt () { #{{{
 	eval q{use Getopt::Long};
+	error($@) if $@;
 	Getopt::Long::Configure('pass_through');
 	GetOptions("aggregate" => \$config{aggregate});
 } #}}}
 
 sub checkconfig () { #{{{
-	IkiWiki::lockwiki();
+	my $nolock=($config{post_commit} && ! IkiWiki::commit_hook_enabled());
+	IkiWiki::lockwiki() unless $nolock;
 	loadstate();
-	if ($config{aggregate}) {
+	if ($config{aggregate} && ! $nolock) {
 		IkiWiki::loadindex();
 		aggregate();
 		expire();
 		savestate();
 	}
-	IkiWiki::unlockwiki();
+	IkiWiki::unlockwiki() unless $nolock;
 } #}}}
 
 sub filter (@) { #{{{
@@ -56,7 +59,7 @@ sub preprocess (@) { #{{{
 
 	foreach my $required (qw{name url}) {
 		if (! exists $params{$required}) {
-			return "[[aggregate plugin missing $required parameter]]";
+			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
 		}
 	}
 
@@ -84,7 +87,7 @@ sub preprocess (@) { #{{{
 	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
 	$feed->{numposts}=0 unless defined $feed->{numposts};
 	$feed->{newposts}=0 unless defined $feed->{newposts};
-	$feed->{message}="new feed" unless defined $feed->{message};
+	$feed->{message}=gettext("new feed") unless defined $feed->{message};
 	$feed->{error}=0 unless defined $feed->{error};
 	$feed->{tags}=[];
 	while (@_) {
@@ -98,8 +101,9 @@ sub preprocess (@) { #{{{
 	return "<a href=\"".$feed->{url}."\">".$feed->{name}.": ".
 	       ($feed->{error} ? "<em>" : "").$feed->{message}.
 	       ($feed->{error} ? "</em>" : "").
-	       " (".$feed->{numposts}." posts".
-	       ($feed->{newposts} ? "; ".$feed->{newposts}." new" : "").
+	       " (".$feed->{numposts}." ".gettext("posts").
+	       ($feed->{newposts} ? "; ".$feed->{newposts}.
+	                            " ".gettext("new") : "").
 	       ")";
 } # }}}
 
@@ -149,9 +153,12 @@ sub loadstate () { #{{{
 
 sub savestate () { #{{{
 	eval q{use HTML::Entities};
-	die $@ if $@;
-	open (OUT, ">$config{wikistatedir}/aggregate" ||
-		die "$config{wikistatedir}/aggregate: $!");
+	error($@) if $@;
+	my $newfile="$config{wikistatedir}/aggregate.new";
+	# TODO: This cleanup function could use improvement. Any newly
+	# aggregated files are left behind unrecorded, and should be deleted.
+	my $cleanup = sub { unlink($newfile) };
+	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
 		if ($data->{remove}) {
 			if ($data->{name}) {
@@ -185,9 +192,11 @@ sub savestate () { #{{{
 				push @line, "$field=".$data->{$field};
 			}
 		}
-		print OUT join(" ", @line)."\n";
+		print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
 	}
-	close OUT;
+	close OUT || error("save $newfile: $!", $cleanup);
+	rename($newfile, "$config{wikistatedir}/aggregate") ||
+		error("rename $newfile: $!", $cleanup);
 } #}}}
 
 sub expire () { #{{{
@@ -200,13 +209,14 @@ sub expire () { #{{{
 			if ($feed->{expireage}) {
 				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
 				if ($days_old > $feed->{expireage}) {
-					debug("expiring ".$item->{page}." ($days_old days old)");
($days_old days old)"); + debug(sprintf(gettext("expiring %s (%s days old)"), + $item->{page}, $days_old)); $item->{expired}=1; } } elsif ($feed->{expirecount} && $count >= $feed->{expirecount}) { - debug("expiring ".$item->{page}); + debug(sprintf(gettext("expiring %s"), $item->{page})); $item->{expired}=1; } else { @@ -218,32 +228,59 @@ sub expire () { #{{{ sub aggregate () { #{{{ eval q{use XML::Feed}; - die $@ if $@; + error($@) if $@; + eval q{use URI::Fetch}; + error($@) if $@; eval q{use HTML::Entities}; - die $@ if $@; + error($@) if $@; foreach my $feed (values %feeds) { next unless $config{rebuild} || time - $feed->{lastupdate} >= $feed->{updateinterval}; $feed->{lastupdate}=time; $feed->{newposts}=0; + $feed->{message}=sprintf(gettext("processed ok at %s"), + displaytime($feed->{lastupdate})); + $feed->{error}=0; $IkiWiki::forcerebuild{$feed->{sourcepage}}=1; - debug("checking feed ".$feed->{name}." ..."); + debug(sprintf(gettext("checking feed %s ..."), $feed->{name})); if (! length $feed->{feedurl}) { my @urls=XML::Feed->find_feeds($feed->{url}); if (! @urls) { - $feed->{message}="could not find feed at ".$feed->{feedurl}; + $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url}); $feed->{error}=1; debug($feed->{message}); next; } $feed->{feedurl}=pop @urls; } - my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))}; + my $res=URI::Fetch->fetch($feed->{feedurl}); + if (! $res) { + $feed->{message}=URI::Fetch->errstr; + $feed->{error}=1; + debug($feed->{message}); + next; + } + if ($res->status == URI::Fetch::URI_GONE()) { + $feed->{message}=gettext("feed not found"); + $feed->{error}=1; + debug($feed->{message}); + next; + } + my $content=$res->content; + my $f=eval{XML::Feed->parse(\$content)}; + if ($@) { + # One common cause of XML::Feed crashing is a feed + # that contains invalid UTF-8 sequences. Convert + # feed to ascii to try to work around. + $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)")); + $content=Encode::decode_utf8($content); + $f=eval{XML::Feed->parse(\$content)}; + } if ($@) { - $feed->{message}="feed crashed XML::Feed! $@"; + $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)"; $feed->{error}=1; debug($feed->{message}); next; @@ -260,15 +297,11 @@ sub aggregate () { #{{{ feed => $feed, title => defined $entry->title ? decode_entities($entry->title) : "untitled", link => $entry->link, - content => $entry->content->body, + content => defined $entry->content->body ? $entry->content->body : "", guid => defined $entry->id ? $entry->id : time."_".$feed->name, ctime => $entry->issued ? ($entry->issued->epoch || time) : time, ); } - - $feed->{message}="processed ok at ". - displaytime($feed->{lastupdate}); - $feed->{error}=0; } } #}}} @@ -281,7 +314,7 @@ sub add_page (@) { #{{{ if (exists $guids{$params{guid}}) { # updating an existing post $guid=$guids{$params{guid}}; - next if $guid->{expired}; + return if $guid->{expired}; } else { # new post @@ -306,8 +339,21 @@ sub add_page (@) { #{{{ -e pagefile($page.$c)) { $c++ } + + # Make sure that the file name isn't too long. + # NB: This doesn't check for path length limits. 
+		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+		if (defined $max && length(htmlfn($page)) >= $max) {
+			$c="";
+			$page=$feed->{dir}."/item";
+			while (exists $IkiWiki::pagecase{lc $page.$c} ||
+			       -e pagefile($page.$c)) {
+				$c++
+			}
+		}
+
 		$guid->{page}=$page;
-		debug("creating new page $page");
+		debug(sprintf(gettext("creating new page %s"), $page));
 	}
 	$guid->{feed}=$feed->{name};
 
@@ -315,6 +361,7 @@ sub add_page (@) { #{{{
 	# to avoid unneccessary rebuilding. The mtime from rss cannot be
 	# trusted; let's use a digest.
 	eval q{use Digest::MD5 'md5_hex'};
+	error($@) if $@;
 	require Encode;
 	my $digest=md5_hex(Encode::encode_utf8($params{content}));
 	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
@@ -332,7 +379,7 @@ sub add_page (@) { #{{{
 	if (ref $feed->{tags}) {
 		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
 	}
-	writefile(htmlpage($guid->{page}), $config{srcdir},
+	writefile(htmlfn($guid->{page}), $config{srcdir},
 		$template->output);
 
 	# Set the mtime, this lets the build process get the right creation
@@ -407,7 +454,11 @@ sub remove_feeds () { #{{{
 
 sub pagefile ($) { #{{{
 	my $page=shift;
-	return "$config{srcdir}/".htmlfn($page);
+	return "$config{srcdir}/".htmlfn($page);
+} #}}}
+
+sub htmlfn ($) { #{{{
+	return shift().".html";
 } #}}}
 
 1
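
A note on the savestate() change above: instead of rewriting the state file in
place, the new code writes everything to "aggregate.new" and then rename()s it
over the old file, so a crash mid-write can no longer leave behind a truncated
state file. Below is a minimal standalone sketch of that pattern; the sub name
save_atomically and the sample data are hypothetical, not part of the plugin,
and plain die stands in for IkiWiki's error().

	#!/usr/bin/perl
	use warnings;
	use strict;

	# Write @lines to $file atomically: fill a temporary file beside it,
	# then rename() it over the original. On POSIX filesystems rename()
	# within one directory is atomic, so readers see either the old
	# contents or the new, never a partial mix.
	sub save_atomically {
		my ($file, @lines)=@_;
		my $newfile="$file.new";
		# On any failure, remove the partial temporary file before dying.
		my $cleanup=sub { unlink($newfile) };
		open(my $out, ">", $newfile)
			or do { $cleanup->(); die "open $newfile: $!" };
		foreach my $line (@lines) {
			print $out "$line\n"
				or do { $cleanup->(); die "write $newfile: $!" };
		}
		close($out)
			or do { $cleanup->(); die "close $newfile: $!" };
		rename($newfile, $file)
			or do { $cleanup->(); die "rename $newfile: $!" };
	}

	save_atomically("aggregate.state", "feed name=example", "guid feed=example");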
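
Likewise, the aggregate() change wraps XML::Feed->parse in a retry: XML::Feed
can die on feeds containing invalid UTF-8, and decoding the bytes first (which
substitutes malformed sequences) often salvages such feeds. A condensed sketch
of just that retry logic; parse_feed is a hypothetical wrapper, not a plugin
function.

	#!/usr/bin/perl
	use warnings;
	use strict;
	use Encode;
	use XML::Feed;

	# Parse feed XML from a scalar, retrying once after decoding the
	# bytes; Encode::decode_utf8 replaces malformed UTF-8 sequences,
	# which works around one common cause of XML::Feed crashing.
	sub parse_feed {
		my $content=shift;
		my $f=eval { XML::Feed->parse(\$content) };
		if ($@) {
			$content=Encode::decode_utf8($content);
			$f=eval { XML::Feed->parse(\$content) };
		}
		die "feed crashed XML::Feed! ($@)" if $@;
		return $f;
	}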