X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/e36f6b6a99c0472d3ca79762305d690c494d483c..cabd5140c4d6255afdcb527e7f6d7e7815e4aa43:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 7c4ab3d10..71368e254 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -4,54 +4,74 @@ package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
-use IkiWiki;
+use IkiWiki 2.00;
 use HTML::Entities;
 use HTML::Parser;
 use HTML::Tagset;
 use URI;
+use open qw{:utf8 :std};
 
 my %feeds;
 my %guids;
 
 sub import { #{{{
-	IkiWiki::hook(type => "getopt", id => "aggregate",
-		call => \&getopt);
-	IkiWiki::hook(type => "checkconfig", id => "aggregate",
-		call => \&checkconfig);
-	IkiWiki::hook(type => "filter", id => "aggregate",
-		call => \&filter);
-	IkiWiki::hook(type => "preprocess", id => "aggregate",
-		call => \&preprocess);
-	IkiWiki::hook(type => "delete", id => "aggregate",
-		call => \&delete);
-	IkiWiki::hook(type => "savestate", id => "aggregate",
-		call => \&savestate);
+	hook(type => "getopt", id => "aggregate", call => \&getopt);
+	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
+	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
+	hook(type => "delete", id => "aggregate", call => \&delete);
+	hook(type => "savestate", id => "aggregate", call => \&savestate);
 } # }}}
 
 sub getopt () { #{{{
 	eval q{use Getopt::Long};
+	error($@) if $@;
 	Getopt::Long::Configure('pass_through');
-	GetOptions("aggregate" => \$IkiWiki::config{aggregate});
+	GetOptions("aggregate" => \$config{aggregate});
 } #}}}
 
 sub checkconfig () { #{{{
-	loadstate();
-	if ($IkiWiki::config{aggregate}) {
-		IkiWiki::loadindex();
-		aggregate();
-		savestate();
+	if ($config{aggregate} && ! ($config{post_commit} &&
+	                             IkiWiki::commit_hook_enabled())) {
+		if (! IkiWiki::lockwiki(0)) {
+			debug("wiki is locked by another process, not aggregating");
+			exit 1;
+		}
+
+		# Fork a child process to handle the aggregation.
+		# The parent process will then handle building the result.
+		# This avoids messy code to clear state accumulated while
+		# aggregating.
+		defined(my $pid = fork) or error("Can't fork: $!");
+		if (! $pid) {
+			loadstate();
+			IkiWiki::loadindex();
+			aggregate();
+			expire();
+			savestate();
+			exit 0;
+		}
+		waitpid($pid,0);
+		if ($?) {
+			error "aggregation failed with code $?";
+		}
+
+		IkiWiki::unlockwiki();
 	}
 } #}}}
 
-sub filter (@) { #{{{
-	my %params=@_;
-	my $page=$params{page};
-
-	# Mark all feeds originating on this page as removable;
-	# preprocess will unmark those that still exist.
-	remove_feeds($page);
+sub needsbuild (@) { #{{{
+	my $needsbuild=shift;
+
+	loadstate(); # if not already loaded
 
-	return $params{content};
+	foreach my $feed (values %feeds) {
+		if (grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+			# Mark all feeds originating on this page as removable;
+			# preprocess will unmark those that still exist.
+			remove_feeds($feed->{sourcepage});
+		}
+	}
 } # }}}
 
 sub preprocess (@) { #{{{
@@ -59,7 +79,7 @@ sub preprocess (@) { #{{{
 
 	foreach my $required (qw{name url}) {
 		if (! exists $params{$required}) {
-			return "[[aggregate plugin missing $required parameter]]";
+			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
 		}
 	}
 
@@ -76,17 +96,19 @@ sub preprocess (@) { #{{{
 	$feed->{url}=$params{url};
 	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
 	$dir=~s/^\/+//;
-	($dir)=$dir=~/$IkiWiki::config{wiki_file_regexp}/;
+	($dir)=$dir=~/$config{wiki_file_regexp}/;
 	$feed->{dir}=$dir;
 	$feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
 	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
 	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
 	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
 	delete $feed->{remove};
+	delete $feed->{expired};
 	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
 	$feed->{numposts}=0 unless defined $feed->{numposts};
 	$feed->{newposts}=0 unless defined $feed->{newposts};
-	$feed->{message}="new feed" unless defined $feed->{message};
+	$feed->{message}=gettext("new feed") unless defined $feed->{message};
+	$feed->{error}=0 unless defined $feed->{error};
 	$feed->{tags}=[];
 	while (@_) {
 		my $key=shift;
@@ -97,8 +119,12 @@ sub preprocess (@) { #{{{
 	}
 
 	return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
-		"<i>".$feed->{message}."</i> (".$feed->{numposts}.
-		" stored posts; ".$feed->{newposts}." new)<br />";
+		($feed->{error} ? "<em>" : "").$feed->{message}.
+		($feed->{error} ? "</em>" : "").
+		" (".$feed->{numposts}." ".gettext("posts").
+		($feed->{newposts} ? "; ".$feed->{newposts}.
+			" ".gettext("new") : "").
+		")";
 } # }}}
 
 sub delete (@) { #{{{
@@ -106,15 +132,18 @@ sub delete (@) { #{{{
 
 	# Remove feed data for removed pages.
 	foreach my $file (@files) {
-		my $page=IkiWiki::pagename($file);
+		my $page=pagename($file);
 		remove_feeds($page);
 	}
 } #}}}
 
+my $state_loaded=0;
 sub loadstate () { #{{{
-	if (-e "$IkiWiki::config{wikistatedir}/aggregate") {
-		open (IN, "$IkiWiki::config{wikistatedir}/aggregate" ||
-			die "$IkiWiki::config{wikistatedir}/aggregate: $!");
+	return if $state_loaded;
+	$state_loaded=1;
+	if (-e "$config{wikistatedir}/aggregate") {
+		open(IN, "$config{wikistatedir}/aggregate") ||
+			die "$config{wikistatedir}/aggregate: $!";
 		while (<IN>) {
 			$_=IkiWiki::possibly_foolish_untaint($_);
 			chomp;
@@ -146,10 +175,12 @@ sub loadstate () { #{{{
 } #}}}
 
 sub savestate () { #{{{
+	return unless $state_loaded;
 	eval q{use HTML::Entities};
-	die $@ if $@;
-	open (OUT, ">$IkiWiki::config{wikistatedir}/aggregate" ||
-		die "$IkiWiki::config{wikistatedir}/aggregate: $!");
+	error($@) if $@;
+	my $newfile="$config{wikistatedir}/aggregate.new";
+	my $cleanup = sub { unlink($newfile) };
+	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
 		if ($data->{remove}) {
 			if ($data->{name}) {
@@ -160,10 +191,16 @@ sub savestate () { #{{{
 				}
 			}
 			else {
-				unlink pagefile($data->{page});
+				unlink pagefile($data->{page})
+					if exists $data->{page};
 			}
 			next;
 		}
+		elsif ($data->{expired} && exists $data->{page}) {
+			unlink pagefile($data->{page});
+			delete $data->{page};
+			delete $data->{md5};
+		}
 
 		my @line;
 		foreach my $field (keys %$data) {
@@ -178,62 +215,129 @@ sub savestate () { #{{{
 				push @line, "$field=".$data->{$field};
 			}
 		}
-		print OUT join(" ", @line)."\n";
+		print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
+	}
+	close OUT || error("save $newfile: $!", $cleanup);
+	rename($newfile, "$config{wikistatedir}/aggregate") ||
+		error("rename $newfile: $!", $cleanup);
+} #}}}
+
+sub expire () { #{{{
+	foreach my $feed (values %feeds) {
+		next unless $feed->{expireage} || $feed->{expirecount};
+		my $count=0;
+		my %seen;
+		foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
+		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
+		                  values %guids) {
+			if ($feed->{expireage}) {
+				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
+				if ($days_old > $feed->{expireage}) {
+					debug(sprintf(gettext("expiring %s (%s days old)"),
+						$item->{page}, int($days_old)));
+					$item->{expired}=1;
+				}
+			}
+			elsif ($feed->{expirecount} &&
+			       $count >= $feed->{expirecount}) {
+				debug(sprintf(gettext("expiring %s"), $item->{page}));
+				$item->{expired}=1;
+			}
+			else {
+				if (! $seen{$item->{page}}) {
+					$seen{$item->{page}}=1;
+					$count++;
+				}
+			}
+		}
 	}
-	close OUT;
 } #}}}
 
 sub aggregate () { #{{{
 	eval q{use XML::Feed};
-	die $@ if $@;
+	error($@) if $@;
+	eval q{use URI::Fetch};
+	error($@) if $@;
 	eval q{use HTML::Entities};
-	die $@ if $@;
+	error($@) if $@;
 	foreach my $feed (values %feeds) {
-		next unless $IkiWiki::config{rebuild} ||
+		next unless $config{rebuild} ||
 			time - $feed->{lastupdate} >= $feed->{updateinterval};
 		$feed->{lastupdate}=time;
 		$feed->{newposts}=0;
+		$feed->{message}=sprintf(gettext("processed ok at %s"),
+			displaytime($feed->{lastupdate}));
+		$feed->{error}=0;
 		$IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
 
-		IkiWiki::debug("checking feed ".$feed->{name}." ...");
+		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
 
 		if (! length $feed->{feedurl}) {
 			my @urls=XML::Feed->find_feeds($feed->{url});
 			if (! @urls) {
-				$feed->{message}="could not find feed at ".$feed->{feedurl};
-				IkiWiki::debug($feed->{message});
+				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
+				$feed->{error}=1;
+				debug($feed->{message});
 				next;
 			}
 			$feed->{feedurl}=pop @urls;
 		}
-		my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
+		my $res=URI::Fetch->fetch($feed->{feedurl});
+		if (! $res) {
+			$feed->{message}=URI::Fetch->errstr;
+			$feed->{error}=1;
+			debug($feed->{message});
+			next;
+		}
+		if ($res->status == URI::Fetch::URI_GONE()) {
+			$feed->{message}=gettext("feed not found");
+			$feed->{error}=1;
+			debug($feed->{message});
+			next;
+		}
+		my $content=$res->content;
+		my $f=eval{XML::Feed->parse(\$content)};
+		if ($@) {
+			# One common cause of XML::Feed crashing is a feed
+			# that contains invalid UTF-8 sequences. Convert
+			# feed to ascii to try to work around.
+			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
+			$content=Encode::decode_utf8($content);
+			$f=eval{XML::Feed->parse(\$content)};
+		}
+		if ($@) {
+			# Another possibility is badly escaped entities.
+			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
+			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
+			$content=Encode::decode_utf8($content);
+			$f=eval{XML::Feed->parse(\$content)};
+		}
 		if ($@) {
-			$feed->{message}="feed crashed XML::Feed! $@";
-			IkiWiki::debug($feed->{message});
+			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+			$feed->{error}=1;
+			debug($feed->{message});
 			next;
 		}
 		if (! $f) {
 			$feed->{message}=XML::Feed->errstr;
-			IkiWiki::debug($feed->{message});
+			$feed->{error}=1;
+			debug($feed->{message});
 			next;
 		}
 
 		foreach my $entry ($f->entries) {
 			add_page(
 				feed => $feed,
+				copyright => $f->copyright,
 				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
 				link => $entry->link,
-				content => $entry->content->body,
+				content => defined $entry->content->body ? $entry->content->body : "",
 				guid => defined $entry->id ? $entry->id : time."_".$feed->name,
 				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
 			);
 		}
-
-		$feed->{message}="processed ok";
 	}
-
-	# TODO: expiry
 } #}}}
 
 sub add_page (@) { #{{{
@@ -245,6 +349,7 @@ sub add_page (@) { #{{{
 	if (exists $guids{$params{guid}}) {
 		# updating an existing post
 		$guid=$guids{$params{guid}};
+		return if $guid->{expired};
 	}
 	else {
 		# new post
@@ -260,18 +365,30 @@ sub add_page (@) { #{{{
 		# directory name or trigger ".." disallowing code.
 		$page=~s!([/.])!"__".ord($1)."__"!eg;
 		$page=$feed->{dir}."/".$page;
-		$page=lc($page);
-		($page)=$page=~/$IkiWiki::config{wiki_file_regexp}/;
+		($page)=$page=~/$config{wiki_file_regexp}/;
 		if (! defined $page || ! length $page) {
 			$page=$feed->{dir}."/item";
 		}
 		my $c="";
-		while (exists $IkiWiki::pagesources{$page.$c} ||
+		while (exists $IkiWiki::pagecase{lc $page.$c} ||
 		       -e pagefile($page.$c)) {
 			$c++
 		}
+
+		# Make sure that the file name isn't too long.
+		# NB: This doesn't check for path length limits.
+		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+		if (defined $max && length(htmlfn($page)) >= $max) {
+			$c="";
+			$page=$feed->{dir}."/item";
+			while (exists $IkiWiki::pagecase{lc $page.$c} ||
+			       -e pagefile($page.$c)) {
+				$c++
+			}
+		}
+
 		$guid->{page}=$page;
-		IkiWiki::debug("creating new page $page");
+		debug(sprintf(gettext("creating new page %s"), $page));
 	}
 	$guid->{feed}=$feed->{name};
 
@@ -279,29 +396,33 @@ sub add_page (@) { #{{{
 	# to avoid unneccessary rebuilding. The mtime from rss cannot be
 	# trusted; let's use a digest.
 	eval q{use Digest::MD5 'md5_hex'};
+	error($@) if $@;
 	require Encode;
 	my $digest=md5_hex(Encode::encode_utf8($params{content}));
-	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $IkiWiki::config{rebuild};
+	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
 	$guid->{md5}=$digest;
 
 	# Create the page.
-	my $template=IkiWiki::template("aggregatepost.tmpl", blind_cache => 1);
+	my $template=template("aggregatepost.tmpl", blind_cache => 1);
 	$template->param(title => $params{title})
 		if defined $params{title} && length($params{title});
 	$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
 	$template->param(name => $feed->{name});
 	$template->param(url => $feed->{url});
+	$template->param(copyright => $params{copyright})
+		if defined $params{copyright} && length $params{copyright};
 	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
 		if defined $params{link};
 	if (ref $feed->{tags}) {
 		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
 	}
-	IkiWiki::writefile($guid->{page}.".html", $IkiWiki::config{srcdir},
+	writefile(htmlfn($guid->{page}), $config{srcdir},
 		$template->output);
 
 	# Set the mtime, this lets the build process get the right creation
 	# time on record for the new page.
-	utime $mtime, $mtime, pagefile($guid->{page}) if defined $mtime;
+	utime $mtime, $mtime, pagefile($guid->{page})
+		if defined $mtime && $mtime <= time;
 } #}}}
 
 sub htmlescape ($) { #{{{
@@ -322,7 +443,7 @@ sub htmlabs ($$) { #{{{
 	# Convert links in html from relative to absolute.
 	# Note that this is a heuristic, which is not specified by the rss
 	# spec and may not be right for all feeds. Also, see Debian
-	# bug #XXXX TODO: get bug.
+	# bug #381359.
 	my $html=shift;
 	my $urlbase=shift;
 
@@ -371,7 +492,11 @@ sub remove_feeds () { #{{{
 sub pagefile ($) { #{{{
 	my $page=shift;
 
-	return "$IkiWiki::config{srcdir}/$page.html";
+	return "$config{srcdir}/".htmlfn($page);
+} #}}}
+
+sub htmlfn ($) { #{{{
+	return shift().".".$config{htmlext};
 } #}}}
 
 1
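
The rewritten savestate() above gains crash safety by writing the state to aggregate.new and only then rename()ing it over the old file, with a cleanup closure that removes the temporary file on error. A minimal standalone sketch of that write-then-rename pattern follows; the file name and record format here are illustrative, not ikiwiki's.

#!/usr/bin/perl
# Sketch of the write-then-rename save pattern. rename() within one
# filesystem is atomic, so other processes see either the complete
# old state file or the complete new one, never a torn write.
use warnings;
use strict;

my $statefile = "aggregate";        # hypothetical state file
my $newfile   = "$statefile.new";   # temp file on the same filesystem

open(my $out, ">", $newfile) or die "open $newfile: $!";
print $out "feed=example lastupdate=0\n" or die "write $newfile: $!";
close($out) or die "close $newfile: $!";

# Atomic step: the old state is replaced only once the new copy is
# complete and closed.
rename($newfile, $statefile) or die "rename $newfile: $!";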
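The new checkconfig() relies on a second pattern worth noting: the aggregation runs in a forked child so that, as the diff's comment says, the parent can build the result without messy code to clear state accumulated while aggregating. A reduced sketch, with the child's work left as a placeholder for loadstate()/aggregate()/savestate():

#!/usr/bin/perl
# Sketch of the fork/waitpid isolation pattern.
use warnings;
use strict;

defined(my $pid = fork) or die "Can't fork: $!";
if (! $pid) {
	# Child: mutate as much global state as needed, then exit;
	# nothing leaks back into the parent's address space.
	exit 0;
}
waitpid($pid, 0);
die "child failed with code $?" if $?;
# Parent continues here, unaffected by the child's state.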