X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/88830016154d99a0155e1cee58582e9f32dcca51..c207086282b2f058f647b7fa810f3da54fe5fe4b:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 98e534366..cb165acd2 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -4,50 +4,93 @@ package IkiWiki::Plugin::aggregate;

 use warnings;
 use strict;
-use IkiWiki;
+use IkiWiki 2.00;
+use HTML::Parser;
+use HTML::Tagset;
+use HTML::Entities;
+use URI;
+use open qw{:utf8 :std};

 my %feeds;
 my %guids;

 sub import { #{{{
-    IkiWiki::hook(type => "getopt", id => "aggregate",
-        call => \&getopt);
-    IkiWiki::hook(type => "checkconfig", id => "aggregate",
-        call => \&checkconfig);
-    IkiWiki::hook(type => "filter", id => "aggregate",
-        call => \&filter);
-    IkiWiki::hook(type => "preprocess", id => "aggregate",
-        call => \&preprocess);
-    IkiWiki::hook(type => "delete", id => "aggregate",
-        call => \&delete);
-    IkiWiki::hook(type => "savestate", id => "aggregate",
-        call => \&savestate);
+    hook(type => "getopt", id => "aggregate", call => \&getopt);
+    hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+    hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
+    hook(type => "preprocess", id => "aggregate", call => \&preprocess);
+    hook(type => "delete", id => "aggregate", call => \&delete);
+    hook(type => "savestate", id => "aggregate", call => \&savestate);
 } # }}}

 sub getopt () { #{{{
     eval q{use Getopt::Long};
+    error($@) if $@;
     Getopt::Long::Configure('pass_through');
-    GetOptions("aggregate" => \$IkiWiki::config{aggregate});
+    GetOptions("aggregate" => \$config{aggregate});
 } #}}}

 sub checkconfig () { #{{{
-    loadstate();
-    if ($IkiWiki::config{aggregate}) {
-        IkiWiki::loadindex();
-        aggregate();
-        savestate();
+    if ($config{aggregate} && ! ($config{post_commit} &&
+                                 IkiWiki::commit_hook_enabled())) {
+        # See if any feeds need aggregation.
+        loadstate();
+        my @feeds=needsaggregate();
+        return unless @feeds;
+        if (! lockaggregate()) {
+            debug("an aggregation process is already running");
+            return;
+        }
+        # force a later rebuild of source pages
+        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
+            foreach @feeds;
+
+        # Fork a child process to handle the aggregation.
+        # The parent process will then handle building the
+        # result. This avoids messy code to clear state
+        # accumulated while aggregating.
+        defined(my $pid = fork) or error("Can't fork: $!");
+        if (! $pid) {
+            IkiWiki::loadindex();
+
+            # Aggregation happens without the main wiki lock
+            # being held. This allows editing pages etc while
+            # aggregation is running.
+            aggregate(@feeds);
+
+            IkiWiki::lockwiki;
+            # Merge changes, since aggregation state may have
+            # changed on disk while the aggregation was happening.
+            mergestate();
+            expire();
+            savestate();
+            IkiWiki::unlockwiki;
+            exit 0;
+        }
+        waitpid($pid,0);
+        if ($?) {
+            error "aggregation failed with code $?";
+        }
+
+        clearstate();
+        unlockaggregate();
     }
 } #}}}

-sub filter (@) { #{{{
-    my %params=@_;
-    my $page=$params{page};
-
-    # Mark all feeds originating on this page as removable;
-    # preprocess will unmark those that still exist.
-    remove_feeds($page);
+sub needsbuild (@) { #{{{
+    my $needsbuild=shift;
+
+    loadstate();

-    return $params{content};
+    foreach my $feed (values %feeds) {
+        if (exists $pagesources{$feed->{sourcepage}} &&
+            grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
+            # Mark all feeds originating on this page as
+            # not yet seen; preprocess will unmark those that
+            # still exist.
+            markunseen($feed->{sourcepage});
+        }
+    }
 } # }}}

 sub preprocess (@) { #{{{
@@ -55,7 +98,7 @@ sub preprocess (@) { #{{{

     foreach my $required (qw{name url}) {
         if (! exists $params{$required}) {
-            return "[[aggregate plugin missing $required parameter]]";
+            return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
         }
     }

@@ -70,19 +113,20 @@ sub preprocess (@) { #{{{
     $feed->{name}=$name;
     $feed->{sourcepage}=$params{page};
     $feed->{url}=$params{url};
-    my $dir=exists $params{dir} ? $params{dir} : "feed/".IkiWiki::titlepage($params{name});
+    my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
     $dir=~s/^\/+//;
-    ($dir)=$dir=~/$IkiWiki::config{wiki_file_regexp}/;
+    ($dir)=$dir=~/$config{wiki_file_regexp}/;
     $feed->{dir}=$dir;
     $feed->{feedurl}=defined $params{feedurl} ? $params{feedurl} : "";
     $feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
     $feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
     $feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
-    delete $feed->{remove};
+    delete $feed->{unseen};
     $feed->{lastupdate}=0 unless defined $feed->{lastupdate};
     $feed->{numposts}=0 unless defined $feed->{numposts};
     $feed->{newposts}=0 unless defined $feed->{newposts};
-    $feed->{message}="new feed" unless defined $feed->{message};
+    $feed->{message}=gettext("new feed") unless defined $feed->{message};
+    $feed->{error}=0 unless defined $feed->{error};
     $feed->{tags}=[];
     while (@_) {
         my $key=shift;
@@ -93,8 +137,12 @@ sub preprocess (@) { #{{{
     }

     return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
-        "<i>".$feed->{message}."</i> (".$feed->{numposts}.
-        " stored posts; ".$feed->{newposts}." new)";
+        ($feed->{error} ? "<em>" : "").$feed->{message}.
+        ($feed->{error} ? "</em>" : "").
+        " (".$feed->{numposts}." ".gettext("posts").
+        ($feed->{newposts} ? "; ".$feed->{newposts}.
+            " ".gettext("new") : "").
+        ")";
 } # }}}

 sub delete (@) { #{{{
@@ -102,17 +150,29 @@ sub delete (@) { #{{{
     # Remove feed data for removed pages.
     foreach my $file (@files) {
-        my $page=IkiWiki::pagename($file);
-        remove_feeds($page);
+        my $page=pagename($file);
+        markunseen($page);
     }
 } #}}}

+sub markunseen ($) { #{{{
+    my $page=shift;
+
+    foreach my $id (keys %feeds) {
+        if ($feeds{$id}->{sourcepage} eq $page) {
+            $feeds{$id}->{unseen}=1;
+        }
+    }
+} #}}}
+
+my $state_loaded=0;
+
 sub loadstate () { #{{{
-    eval q{use HTML::Entities};
-    die $@ if $@;
-    if (-e "$IkiWiki::config{wikistatedir}/aggregate") {
-        open (IN, "$IkiWiki::config{wikistatedir}/aggregate" ||
-            die "$IkiWiki::config{wikistatedir}/aggregate: $!");
+    return if $state_loaded;
+    $state_loaded=1;
+    if (-e "$config{wikistatedir}/aggregate") {
+        open(IN, "$config{wikistatedir}/aggregate") ||
+            die "$config{wikistatedir}/aggregate: $!";
         while (<IN>) {
             $_=IkiWiki::possibly_foolish_untaint($_);
             chomp;
@@ -144,25 +204,12 @@ sub loadstate () { #{{{
 } #}}}

 sub savestate () { #{{{
-    eval q{use HTML::Entities};
-    die $@ if $@;
-    open (OUT, ">$IkiWiki::config{wikistatedir}/aggregate" ||
-        die "$IkiWiki::config{wikistatedir}/aggregate: $!");
+    return unless $state_loaded;
+    garbage_collect();
+    my $newfile="$config{wikistatedir}/aggregate.new";
+    my $cleanup = sub { unlink($newfile) };
+    open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
     foreach my $data (values %feeds, values %guids) {
-        if ($data->{remove}) {
-            if ($data->{name}) {
-                foreach my $guid (values %guids) {
-                    if ($guid->{feed} eq $data->{name}) {
-                        $guid->{remove}=1;
-                    }
-                }
-            }
-            else {
-                unlink pagefile($data->{page});
-            }
-            next;
-        }
-
         my @line;
         foreach my $field (keys %$data) {
             if ($field eq "name" || $field eq "feed" ||
@@ -176,62 +223,192 @@ sub savestate () { #{{{
                 push @line, "$field=".$data->{$field};
             }
         }
-        print OUT join(" ", @line)."\n";
+        print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
     }
-    close OUT;
+    close OUT || error("save $newfile: $!", $cleanup);
+    rename($newfile, "$config{wikistatedir}/aggregate") ||
+        error("rename $newfile: $!", $cleanup);
 } #}}}

-sub aggregate () { #{{{
-    eval q{use XML::Feed};
-    die $@ if $@;
-    eval q{use HTML::Entities};
-    die $@ if $@;
+sub garbage_collect () { #{{{
+    foreach my $name (keys %feeds) {
+        # remove any feeds that were not seen while building the pages
+        # that used to contain them
+        if ($feeds{$name}->{unseen}) {
+            delete $feeds{$name};
+        }
+    }
+
+    foreach my $guid (values %guids) {
+        # any guid whose feed is gone should be removed
+        if (! exists $feeds{$guid->{feed}}) {
+            unlink pagefile($guid->{page})
+                if exists $guid->{page};
+            delete $guids{$guid->{guid}};
+        }
+        # handle expired guids
+        elsif ($guid->{expired} && exists $guid->{page}) {
+            unlink pagefile($guid->{page});
+            delete $guid->{page};
+            delete $guid->{md5};
+        }
+    }
+} #}}}
+
+sub mergestate () { #{{{
+    # Load the current state in from disk, and merge into it
+    # values from the state in memory that might have changed
+    # during aggregation.
+    my %myfeeds=%feeds;
+    my %myguids=%guids;
+    clearstate();
+    loadstate();
+
+    # All that can change in feed state during aggregation is a few
+    # fields.
+    foreach my $name (keys %myfeeds) {
+        if (exists $feeds{$name}) {
+            foreach my $field (qw{message lastupdate numposts
+                                  newposts error}) {
+                $feeds{$name}->{$field}=$myfeeds{$name}->{$field};
+            }
+        }
+    }
+
+    # New guids can be created during aggregation.
+    # It's also possible that guids were removed from the on-disk state
+    # while the aggregation was in process. That would only happen if
+    # their feed was also removed, so any removed guids added back here
+    # will be garbage collected later.
+    foreach my $guid (keys %myguids) {
+        if (! exists $guids{$guid}) {
+            $guids{$guid}=$myguids{$guid};
+        }
+    }
+} #}}}
+
+sub clearstate () { #{{{
+    %feeds=();
+    %guids=();
+    $state_loaded=0;
+} #}}}
+
+sub expire () { #{{{
     foreach my $feed (values %feeds) {
-        next unless $IkiWiki::config{rebuild} ||
-            time - $feed->{lastupdate} >= $feed->{updateinterval};
+        next unless $feed->{expireage} || $feed->{expirecount};
+        my $count=0;
+        my %seen;
+        foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
+                          grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
+                          values %guids) {
+            if ($feed->{expireage}) {
+                my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
+                if ($days_old > $feed->{expireage}) {
+                    debug(sprintf(gettext("expiring %s (%s days old)"),
+                        $item->{page}, int($days_old)));
+                    $item->{expired}=1;
+                }
+            }
+            elsif ($feed->{expirecount} &&
+                   $count >= $feed->{expirecount}) {
+                debug(sprintf(gettext("expiring %s"), $item->{page}));
+                $item->{expired}=1;
+            }
+            else {
+                if (! $seen{$item->{page}}) {
+                    $seen{$item->{page}}=1;
+                    $count++;
+                }
+            }
+        }
+    }
+} #}}}
+
+sub needsaggregate () { #{{{
+    return values %feeds if $config{rebuild};
+    return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
+} #}}}
+
+sub aggregate (@) { #{{{
+    eval q{use XML::Feed};
+    error($@) if $@;
+    eval q{use URI::Fetch};
+    error($@) if $@;
+
+    foreach my $feed (@_) {
         $feed->{lastupdate}=time;
         $feed->{newposts}=0;
-        $IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
+        $feed->{message}=sprintf(gettext("processed ok at %s"),
+            displaytime($feed->{lastupdate}));
+        $feed->{error}=0;

-        IkiWiki::debug("checking feed ".$feed->{name}." ...");
+        debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));

         if (! length $feed->{feedurl}) {
             my @urls=XML::Feed->find_feeds($feed->{url});
             if (! @urls) {
-                $feed->{message}="could not find feed at ".$feed->{feedurl};
-                IkiWiki::debug($feed->{message});
+                $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
+                $feed->{error}=1;
+                debug($feed->{message});
                 next;
             }
             $feed->{feedurl}=pop @urls;
         }
-        my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
+        my $res=URI::Fetch->fetch($feed->{feedurl});
+        if (! $res) {
+            $feed->{message}=URI::Fetch->errstr;
+            $feed->{error}=1;
+            debug($feed->{message});
+            next;
+        }
+        if ($res->status == URI::Fetch::URI_GONE()) {
+            $feed->{message}=gettext("feed not found");
+            $feed->{error}=1;
+            debug($feed->{message});
+            next;
+        }
+        my $content=$res->content;
+        my $f=eval{XML::Feed->parse(\$content)};
+        if ($@) {
+            # One common cause of XML::Feed crashing is a feed
+            # that contains invalid UTF-8 sequences. Convert
+            # feed to ascii to try to work around.
+            $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
+            $content=Encode::decode_utf8($content);
+            $f=eval{XML::Feed->parse(\$content)};
+        }
         if ($@) {
-            $feed->{message}="feed crashed XML::Feed! $@";
-            IkiWiki::debug($feed->{message});
+            # Another possibility is badly escaped entities.
+            $feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
+            $content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
+            $content=Encode::decode_utf8($content);
+            $f=eval{XML::Feed->parse(\$content)};
+        }
+        if ($@) {
+            $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+            $feed->{error}=1;
+            debug($feed->{message});
             next;
         }
         if (! $f) {
             $feed->{message}=XML::Feed->errstr;
-            IkiWiki::debug($feed->{message});
+            $feed->{error}=1;
+            debug($feed->{message});
             next;
         }

         foreach my $entry ($f->entries) {
             add_page(
                 feed => $feed,
+                copyright => $f->copyright,
                 title => defined $entry->title ? decode_entities($entry->title) : "untitled",
                 link => $entry->link,
-                content => $entry->content->body,
-                guid => defined $entry->id ? $entry->id : time."_".$feed->name,
+                content => defined $entry->content->body ? $entry->content->body : "",
+                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
                 ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
             );
         }
-
-        $feed->{message}="processed ok";
     }
-
-    # TODO: expiry
 } #}}}

 sub add_page (@) { #{{{
@@ -243,6 +420,7 @@ sub add_page (@) { #{{{
     if (exists $guids{$params{guid}}) {
         # updating an existing post
         $guid=$guids{$params{guid}};
+        return if $guid->{expired};
     }
     else {
         # new post
@@ -254,21 +432,34 @@ sub add_page (@) { #{{{
         # assign it an unused page
         my $page=IkiWiki::titlepage($params{title});
-        $page=~s!([/])!"__".ord($1)."__"!eg; # escape slashes in title
+        # escape slashes and periods in title so it doesn't specify
+        # directory name or trigger ".." disallowing code.
+        $page=~s!([/.])!"__".ord($1)."__"!eg;
         $page=$feed->{dir}."/".$page;
-        $page=lc($page);
-        ($page)=$page=~/$IkiWiki::config{wiki_file_regexp}/;
+        ($page)=$page=~/$config{wiki_file_regexp}/;
         if (! defined $page || ! length $page) {
             $page=$feed->{dir}."/item";
         }
-        $page=~s/\.\.//g; # avoid ".." directory tricks
         my $c="";
-        while (exists $IkiWiki::pagesources{$page.$c} ||
+        while (exists $IkiWiki::pagecase{lc $page.$c} ||
               -e pagefile($page.$c)) {
             $c++
         }
+
+        # Make sure that the file name isn't too long.
+        # NB: This doesn't check for path length limits.
+        my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+        if (defined $max && length(htmlfn($page)) >= $max) {
+            $c="";
+            $page=$feed->{dir}."/item";
+            while (exists $IkiWiki::pagecase{lc $page.$c} ||
+                  -e pagefile($page.$c)) {
+                $c++
+            }
+        }
+
         $guid->{page}=$page;
-        IkiWiki::debug("creating new page $page");
+        debug(sprintf(gettext("creating new page %s"), $page));
     }

     $guid->{feed}=$feed->{name};
@@ -276,49 +467,117 @@ sub add_page (@) { #{{{
     # to avoid unneccessary rebuilding. The mtime from rss cannot be
     # trusted; let's use a digest.
     eval q{use Digest::MD5 'md5_hex'};
+    error($@) if $@;
     require Encode;
     my $digest=md5_hex(Encode::encode_utf8($params{content}));
-    return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $IkiWiki::config{rebuild};
+    return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
     $guid->{md5}=$digest;

     # Create the page.
-    my $template=IkiWiki::template("aggregatepost.tmpl", blind_cache => 1);
-    my $content=$params{content};
-    $params{content}=~s/(?<!\\)\[\[/\\\[\[/g; # escape accidental wikilinks
-                                              # and preprocessor stuff
+    my $template=template("aggregatepost.tmpl", blind_cache => 1);
     $template->param(title => $params{title})
         if defined $params{title} && length($params{title});
-    $template->param(content => $params{content});
-    $template->param(url => $feed->{url});
+    $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
     $template->param(name => $feed->{name});
-    $template->param(link => $params{link}) if defined $params{link};
+    $template->param(url => $feed->{url});
+    $template->param(copyright => $params{copyright})
+        if defined $params{copyright} && length $params{copyright};
+    $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
+        if defined $params{link};
     if (ref $feed->{tags}) {
         $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
     }
-    IkiWiki::writefile($guid->{page}.".html", $IkiWiki::config{srcdir},
+    writefile(htmlfn($guid->{page}), $config{srcdir},
         $template->output);

     # Set the mtime, this lets the build process get the right creation
     # time on record for the new page.
-    utime $mtime, $mtime, pagefile($guid->{page}) if defined $mtime;
+    utime $mtime, $mtime, pagefile($guid->{page})
+        if defined $mtime && $mtime <= time;
 } #}}}

-sub remove_feeds () { #{{{
-    my $page=shift;
+sub htmlescape ($) { #{{{
+    # escape accidental wikilinks and preprocessor stuff
+    my $html=shift;
+    $html=~s/(?<!\\)\[\[/&#91;\[/g;
+    return $html;
+} #}}}

-    my %removed;
-    foreach my $id (keys %feeds) {
-        if ($feeds{$id}->{sourcepage} eq $page) {
-            $feeds{$id}->{remove}=1;
-            $removed{$id}=1;
+sub urlabs ($$) { #{{{
+    my $url=shift;
+    my $urlbase=shift;
+
+    URI->new_abs($url, $urlbase)->as_string;
+} #}}}
+
+sub htmlabs ($$) { #{{{
+    # Convert links in html from relative to absolute.
+    # Note that this is a heuristic, which is not specified by the rss
+    # spec and may not be right for all feeds. Also, see Debian
+    # bug #381359.
+    my $html=shift;
+    my $urlbase=shift;
+
+    my $ret="";
+    my $p = HTML::Parser->new(api_version => 3);
+    $p->handler(default => sub { $ret.=join("", @_) }, "text");
+    $p->handler(start => sub {
+        my ($tagname, $pos, $text) = @_;
+        if (ref $HTML::Tagset::linkElements{$tagname}) {
+            while (4 <= @$pos) {
+                # use attribute sets from right to left
+                # to avoid invalidating the offsets
+                # when replacing the values
+                my($k_offset, $k_len, $v_offset, $v_len) =
+                    splice(@$pos, -4);
+                my $attrname = lc(substr($text, $k_offset, $k_len));
+                next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
+                next unless $v_offset; # 0 v_offset means no value
+                my $v = substr($text, $v_offset, $v_len);
+                $v =~ s/^([\'\"])(.*)\1$/$2/;
+                my $new_v=urlabs($v, $urlbase);
+                $new_v =~ s/\"/&quot;/g; # since we quote with ""
+                substr($text, $v_offset, $v_len) = qq("$new_v");
+            }
         }
-    }
+        $ret.=$text;
+    }, "tagname, tokenpos, text");
+    $p->parse($html);
+    $p->eof;
+
+    return $ret;
 } #}}}

 sub pagefile ($) { #{{{
     my $page=shift;

-    return "$IkiWiki::config{srcdir}/$page.html";
+    return "$config{srcdir}/".htmlfn($page);
+} #}}}
+
+sub htmlfn ($) { #{{{
+    return shift().".".$config{htmlext};
+} #}}}
+
+my $aggregatelock;
+
+sub lockaggregate () { #{{{
+    # Take an exclusive lock to prevent multiple concurrent aggregators.
+    # Returns true if the lock was aquired.
+    if (! -d $config{wikistatedir}) {
+        mkdir($config{wikistatedir});
+    }
+    open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
+        error ("cannot open to $config{wikistatedir}/aggregatelock: $!");
+    if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
+        close($aggregatelock) || error("failed closing aggregatelock: $!");
+        return 0;
+    }
+    return 1;
+} #}}}
+
+sub unlockaggregate () { #{{{
+    return close($aggregatelock) if $aggregatelock;
+    return;
 } #}}}

 1
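Editor's note: the lockaggregate()/unlockaggregate() pair added by this patch serializes aggregators with a non-blocking exclusive flock; the literal 2 | 4 in the diff is LOCK_EX | LOCK_NB. Below is a minimal standalone sketch of that same pattern, written with the symbolic Fcntl constants; the lock file path and surrounding script are illustrative only and are not part of the patch.

    #!/usr/bin/perl
    # Sketch of the non-blocking exclusive lock pattern used by lockaggregate().
    use strict;
    use warnings;
    use Fcntl qw(:flock);

    my $lockfile = "/tmp/example-aggregatelock";   # illustrative path only
    open(my $lock, '>', $lockfile) or die "open $lockfile: $!";
    if (! flock($lock, LOCK_EX | LOCK_NB)) {
        # another process already holds the lock; bail out instead of blocking
        close($lock);
        exit 0;
    }
    # ... exclusive work happens here ...
    close($lock);   # closing the handle releases the lock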