use warnings;
use strict;
-use IkiWiki;
+use IkiWiki 2.00;
use HTML::Entities;
use HTML::Parser;
use HTML::Tagset;
use URI;
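+# Default all file and standard I/O streams to UTF-8.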
+use open qw{:utf8 :std};
my %feeds;
my %guids;
sub getopt () { #{{{
eval q{use Getopt::Long};
+ error($@) if $@;
Getopt::Long::Configure('pass_through');
GetOptions("aggregate" => \$config{aggregate});
} #}}}
sub checkconfig () { #{{{
- IkiWiki::lockwiki();
- loadstate();
- if ($config{aggregate}) {
+ if ($config{aggregate} && ! ($config{post_commit} &&
+ IkiWiki::commit_hook_enabled())) {
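+ # lockwiki(0) is expected to fail rather than block when another
+ # process holds the wiki lock; in that case skip this aggregation run.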
+ if (! IkiWiki::lockwiki(0)) {
+ debug("wiki is locked by another process, not aggregating");
+ exit 1;
+ }
+
+ loadstate();
IkiWiki::loadindex();
aggregate();
+ expire();
savestate();
+ clearstate();
+
+ IkiWiki::unlockwiki();
}
- IkiWiki::unlockwiki();
} #}}}
sub filter (@) { #{{{
my %params=@_;
my $page=$params{page};
+ loadstate(); # if not already loaded
# Mark all feeds originating on this page as removable;
# preprocess will unmark those that still exist.
remove_feeds($page);
foreach my $required (qw{name url}) {
if (! exists $params{$required}) {
- return "[[aggregate plugin missing $required parameter]]";
+ return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
}
}
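+ # expireage (in days) and expirecount (posts to keep) default to 0,
+ # which disables expiry for this feed.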
$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
delete $feed->{remove};
+ delete $feed->{expired};
$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
$feed->{numposts}=0 unless defined $feed->{numposts};
$feed->{newposts}=0 unless defined $feed->{newposts};
- $feed->{message}="new feed" unless defined $feed->{message};
+ $feed->{message}=gettext("new feed") unless defined $feed->{message};
+ $feed->{error}=0 unless defined $feed->{error};
$feed->{tags}=[];
while (@_) {
my $key=shift;
}
return "<a href=\"".$feed->{url}."\">".$feed->{name}."</a>: ".
- "<i>".$feed->{message}."</i> (".$feed->{numposts}." posts".
- ($feed->{newposts} ? "; ".$feed->{newposts}." new" : "").
+ ($feed->{error} ? "<em>" : "").$feed->{message}.
+ ($feed->{error} ? "</em>" : "").
+ " (".$feed->{numposts}." ".gettext("posts").
+ ($feed->{newposts} ? "; ".$feed->{newposts}.
+ " ".gettext("new") : "").
")";
} # }}}
}
} #}}}
+my $state_loaded=0;
sub loadstate () { #{{{
+ return if $state_loaded;
if (-e "$config{wikistatedir}/aggregate") {
open (IN, "$config{wikistatedir}/aggregate" ||
die "$config{wikistatedir}/aggregate: $!");
}
close IN;
+
+ $state_loaded=1;
}
} #}}}
sub savestate () { #{{{
eval q{use HTML::Entities};
- die $@ if $@;
- open (OUT, ">$config{wikistatedir}/aggregate" ||
- die "$config{wikistatedir}/aggregate: $!");
+ error($@) if $@;
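+ # Write the state to a new file and rename it into place, so a failed
+ # save cannot truncate the existing state file.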
+ my $newfile="$config{wikistatedir}/aggregate.new";
+ # TODO: This cleanup function could use improvement. Any newly
+ # aggregated files are left behind unrecorded, and should be deleted.
+ my $cleanup = sub { unlink($newfile) };
+ open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
foreach my $data (values %feeds, values %guids) {
if ($data->{remove}) {
if ($data->{name}) {
}
next;
}
+ elsif ($data->{expired} && exists $data->{page}) {
+ unlink pagefile($data->{page});
+ delete $data->{page};
+ delete $data->{md5};
+ }
my @line;
foreach my $field (keys %$data) {
push @line, "$field=".$data->{$field};
}
}
- print OUT join(" ", @line)."\n";
+ print OUT join(" ", @line)."\n" or error("write $newfile: $!", $cleanup);
+ }
+ close OUT || error("save $newfile: $!", $cleanup);
+ rename($newfile, "$config{wikistatedir}/aggregate") ||
+ error("rename $newfile: $!", $cleanup);
+} #}}}
+
+sub clearstate () { #{{{
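+ # Drop the in-memory feed/guid state so the next loadstate() call
+ # rereads it from disk.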
+ %feeds=();
+ %guids=();
+ $state_loaded=0;
+} #}}}
+
+sub expire () { #{{{
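+ # Walk each feed's items newest first and mark old ones expired:
+ # items older than expireage days, or beyond the newest expirecount items.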
+ foreach my $feed (values %feeds) {
+ next unless $feed->{expireage} || $feed->{expirecount};
+ my $count=0;
+ foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
+ grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
+ values %guids) {
+ if ($feed->{expireage}) {
+ my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
+ if ($days_old > $feed->{expireage}) {
+ debug(sprintf(gettext("expiring %s (%s days old)"),
+ $item->{page}, $days_old));
+ $item->{expired}=1;
+ }
+ }
+ elsif ($feed->{expirecount} &&
+ $count >= $feed->{expirecount}) {
+ debug(sprintf(gettext("expiring %s"), $item->{page}));
+ $item->{expired}=1;
+ }
+ else {
+ $count++;
+ }
+ }
}
- close OUT;
} #}}}
sub aggregate () { #{{{
eval q{use XML::Feed};
- die $@ if $@;
+ error($@) if $@;
+ eval q{use URI::Fetch};
+ error($@) if $@;
eval q{use HTML::Entities};
- die $@ if $@;
+ error($@) if $@;
foreach my $feed (values %feeds) {
next unless $config{rebuild} ||
time - $feed->{lastupdate} >= $feed->{updateinterval};
$feed->{lastupdate}=time;
$feed->{newposts}=0;
+ $feed->{message}=sprintf(gettext("processed ok at %s"),
+ displaytime($feed->{lastupdate}));
+ $feed->{error}=0;
$IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
- debug("checking feed ".$feed->{name}." ...");
+ debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
if (! length $feed->{feedurl}) {
my @urls=XML::Feed->find_feeds($feed->{url});
if (! @urls) {
- $feed->{message}="could not find feed at ".$feed->{feedurl};
+ $feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
+ $feed->{error}=1;
debug($feed->{message});
next;
}
$feed->{feedurl}=pop @urls;
}
- my $f=eval{XML::Feed->parse(URI->new($feed->{feedurl}))};
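+ # Fetch the feed content with URI::Fetch so HTTP-level failures
+ # (including 410 Gone) can be reported per feed before parsing.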
+ my $res=URI::Fetch->fetch($feed->{feedurl});
+ if (! $res) {
+ $feed->{message}=URI::Fetch->errstr;
+ $feed->{error}=1;
+ debug($feed->{message});
+ next;
+ }
+ if ($res->status == URI::Fetch::URI_GONE()) {
+ $feed->{message}=gettext("feed not found");
+ $feed->{error}=1;
+ debug($feed->{message});
+ next;
+ }
+ my $content=$res->content;
+ my $f=eval{XML::Feed->parse(\$content)};
+ if ($@) {
+ # One common cause of XML::Feed crashing is a feed
+ # that contains invalid UTF-8 sequences. Decode the
+ # content as UTF-8, replacing invalid sequences, and retry.
+ $feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
+ $content=Encode::decode_utf8($content);
+ $f=eval{XML::Feed->parse(\$content)};
+ }
if ($@) {
- $feed->{message}="feed crashed XML::Feed! $@";
+ $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+ $feed->{error}=1;
debug($feed->{message});
next;
}
if (! $f) {
$feed->{message}=XML::Feed->errstr;
+ $feed->{error}=1;
debug($feed->{message});
next;
}
feed => $feed,
title => defined $entry->title ? decode_entities($entry->title) : "untitled",
link => $entry->link,
- content => $entry->content->body,
+ content => defined $entry->content->body ? $entry->content->body : "",
guid => defined $entry->id ? $entry->id : time."_".$feed->name,
ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
);
}
-
- $feed->{message}="processed ok at ".
- displaytime($feed->{lastupdate});
}
-
- # TODO: expiry
} #}}}
sub add_page (@) { #{{{
if (exists $guids{$params{guid}}) {
# updating an existing post
$guid=$guids{$params{guid}};
+ return if $guid->{expired};
}
else {
# new post
-e pagefile($page.$c)) {
$c++
}
+
+ # Make sure that the file name isn't too long.
+ # NB: This doesn't check for path length limits.
+ my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+ if (defined $max && length(htmlfn($page)) >= $max) {
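+ # Fall back to a generic "item" page name under the feed's directory
+ # when the title-based name is too long.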
+ $c="";
+ $page=$feed->{dir}."/item";
+ while (exists $IkiWiki::pagecase{lc $page.$c} ||
+ -e pagefile($page.$c)) {
+ $c++
+ }
+ }
+
$guid->{page}=$page;
- debug("creating new page $page");
+ debug(sprintf(gettext("creating new page %s"), $page));
}
$guid->{feed}=$feed->{name};
# to avoid unnecessary rebuilding. The mtime from rss cannot be
# trusted; let's use a digest.
eval q{use Digest::MD5 'md5_hex'};
+ error($@) if $@;
require Encode;
my $digest=md5_hex(Encode::encode_utf8($params{content}));
return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
if (ref $feed->{tags}) {
$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
}
- writefile(htmlpage($guid->{page}), $config{srcdir},
+ writefile(htmlfn($guid->{page}), $config{srcdir},
$template->output);
# Set the mtime; this lets the build process get the right creation
sub pagefile ($) { #{{{
my $page=shift;
- return "$config{srcdir}/".htmlpage($page);
+ return "$config{srcdir}/".htmlfn($page);
+} #}}}
+
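+# Aggregated posts are stored as .html source files in the srcdir;
+# this builds that file name.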
+sub htmlfn ($) { #{{{
+ return shift().".html";
} #}}}
1