use warnings;
use strict;
-use IkiWiki 2.00;
+use IkiWiki 3.00;
use HTML::Parser;
use HTML::Tagset;
use HTML::Entities;
-use URI;
use open qw{:utf8 :std};
my %feeds;
my %guids;
-sub import { #{{{
+sub import {
hook(type => "getopt", id => "aggregate", call => \&getopt);
hook(type => "getsetup", id => "aggregate", call => \&getsetup);
- hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+ hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
+ last => 1);
hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
hook(type => "preprocess", id => "aggregate", call => \&preprocess);
hook(type => "delete", id => "aggregate", call => \&delete);
if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
hook(type => "cgi", id => "aggregate", call => \&cgi);
}
-} # }}}
+}
-sub getopt () { #{{{
+sub getopt () {
eval q{use Getopt::Long};
error($@) if $@;
Getopt::Long::Configure('pass_through');
"aggregate" => \$config{aggregate},
"aggregateinternal!" => \$config{aggregateinternal},
);
-} #}}}
+}
-sub getsetup () { #{{{
+sub getsetup () {
return
plugin => {
			safe => 1,
			rebuild => undef,
},
aggregateinternal => {
type => "boolean",
- example => 0,
+ example => 1,
description => "enable aggregation to internal pages?",
			safe => 0, # enabling needs manual transition
			rebuild => 0,
		},
		aggregate_webtrigger => {
			type => "boolean",
			example => 0,
			description => "allow aggregation to be triggered via the web?",
			safe => 1,
			rebuild => 0,
		},
-} #}}}
+}
+
+sub checkconfig () {
+ if (! defined $config{aggregateinternal}) {
+ $config{aggregateinternal}=1;
+ }
-sub checkconfig () { #{{{
+ # This is done here rather than in a refresh hook because it
+ # needs to run before the wiki is locked.
if ($config{aggregate} && ! ($config{post_commit} &&
IkiWiki::commit_hook_enabled())) {
launchaggregation();
}
-} #}}}
+}
-sub cgi ($) { #{{{
+sub cgi ($) {
my $cgi=shift;
	if (defined $cgi->param('do') &&
	    $cgi->param("do") eq "aggregate_webtrigger") {
		$|=1;
		print "Content-Type: text/plain\n\n";
		$config{cgi}=0;
		$config{verbose}=1;
		$config{syslog}=0;
		print gettext("Aggregation triggered via web.")."\n\n";
		if (launchaggregation()) {
			IkiWiki::refresh();
			IkiWiki::saveindex();
		}
		else {
			print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
}
exit 0;
}
-} #}}}
+}
-sub launchaggregation () { #{{{
+sub launchaggregation () {
# See if any feeds need aggregation.
loadstate();
my @feeds=needsaggregate();
return unless @feeds;
if (! lockaggregate()) {
- debug("an aggregation process is already running");
- return;
+ error("an aggregation process is already running");
}
# force a later rebuild of source pages
	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
		foreach @feeds;
unlockaggregate();
return 1;
-} #}}}
+}
# Pages with extension _aggregated have plain html markup, pass through.
-sub htmlize (@) { #{{{
+sub htmlize (@) {
my %params=@_;
return $params{content};
-} #}}}
+}
# Used by ikiwiki-transition aggregateinternal.
-sub migrate_to_internal { #{{{
+sub migrate_to_internal {
if (! lockaggregate()) {
error("an aggregation process is currently running");
}
	IkiWiki::lockwiki();
	loadstate();
	$config{verbose}=1;

	foreach my $data (values %guids) {
		next unless $data->{page};
		next if $data->{expired};

		$config{aggregateinternal} = 0;
my $oldname = "$config{srcdir}/".htmlfn($data->{page});
+ if (! -e $oldname) {
+ $oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
+ }
+
my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
$config{aggregateinternal} = 1;
- my $newname = "$config{srcdir}/".htmlfn($data->{page});
+ my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
debug "moving $oldname -> $newname";
if (-e $newname) {
if (-e $oldoutput) {
require IkiWiki::Render;
debug("removing output file $oldoutput");
- IkiWiki::prune($oldoutput);
+ IkiWiki::prune($oldoutput, $config{destdir});
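+			# (the extra destdir argument keeps prune from
+			# removing directories above the destdir itself)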
}
}
	savestate();
	IkiWiki::unlockwiki;
unlockaggregate();
-} #}}}
+}
-sub needsbuild (@) { #{{{
+sub needsbuild (@) {
my $needsbuild=shift;
loadstate();
	foreach my $feed (values %feeds) {
		if (exists $pagesources{$feed->{sourcepage}} &&
		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
			# Mark all feeds originating on this page as
			# not yet seen; preprocess will unmark those that
			# still exist.
			markunseen($feed->{sourcepage});
}
}
-} # }}}
-sub preprocess (@) { #{{{
+ return $needsbuild;
+}
+
+sub preprocess (@) {
my %params=@_;
	foreach my $required (qw{name url}) {
		if (! exists $params{$required}) {
			error sprintf(gettext("missing %s parameter"), $required);
		}
	}

	my $feed={};
	my $name=$params{name};
	if (exists $feeds{$name}) {
		$feed=$feeds{$name};
	}
	else {
		$feeds{$name}=$feed;
	}
$feed->{template}=$params{template} . ".tmpl";
delete $feed->{unseen};
$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
+ $feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
$feed->{numposts}=0 unless defined $feed->{numposts};
$feed->{newposts}=0 unless defined $feed->{newposts};
$feed->{message}=gettext("new feed") unless defined $feed->{message};
($feed->{newposts} ? "; ".$feed->{newposts}.
" ".gettext("new") : "").
")";
-} # }}}
+}
-sub delete (@) { #{{{
+sub delete (@) {
my @files=@_;
# Remove feed data for removed pages.
	foreach my $file (@files) {
		my $page=pagename($file);
markunseen($page);
}
-} #}}}
+}
-sub markunseen ($) { #{{{
+sub markunseen ($) {
my $page=shift;
foreach my $id (keys %feeds) {
		if ($feeds{$id}->{sourcepage} eq $page) {
			$feeds{$id}->{unseen}=1;
}
}
-} #}}}
+}
my $state_loaded=0;
-sub loadstate () { #{{{
+sub loadstate () {
return if $state_loaded;
$state_loaded=1;
if (-e "$config{wikistatedir}/aggregate") {
- open(IN, "$config{wikistatedir}/aggregate") ||
+ open(IN, "<", "$config{wikistatedir}/aggregate") ||
die "$config{wikistatedir}/aggregate: $!";
while (<IN>) {
			$_=IkiWiki::possibly_foolish_untaint($_);
			chomp;
			my %data;
			foreach my $i (split(/ /, $_)) {
				my ($field, $val)=split(/=/, $i, 2);
				if ($field eq "name" || $field eq "feed" ||
				    $field eq "guid" || $field eq "message") {
					$data{$field}=decode_entities($val, " \t\n");
				}
				elsif ($field eq "tag") {
					push @{$data{tags}}, $val;
				}
				else {
					$data{$field}=$val;
				}
			}

			if (exists $data{name}) {
				$feeds{$data{name}}={%data};
			}
			elsif (exists $data{guid}) {
				$guids{$data{guid}}={%data};
			}
		}

		close IN;
}
-} #}}}
+}
-sub savestate () { #{{{
+sub savestate () {
return unless $state_loaded;
garbage_collect();
my $newfile="$config{wikistatedir}/aggregate.new";
my $cleanup = sub { unlink($newfile) };
- open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+ open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
foreach my $data (values %feeds, values %guids) {
my @line;
foreach my $field (keys %$data) {
push @line, "tag=$_" foreach @{$data->{tags}};
}
else {
- push @line, "$field=".$data->{$field};
+ push @line, "$field=".$data->{$field}
+ if defined $data->{$field};
}
}
print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
close OUT || error("save $newfile: $!", $cleanup);
rename($newfile, "$config{wikistatedir}/aggregate") ||
error("rename $newfile: $!", $cleanup);
-} #}}}
-sub garbage_collect () { #{{{
+ my $timestamp=undef;
+ foreach my $feed (keys %feeds) {
+ my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
+ if (! defined $timestamp || $timestamp > $t) {
+ $timestamp=$t;
+ }
+ }
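+	# Record when the next feed will be due for aggregation in a
+	# separate "aggregatetime" state file, so cron jobs and wrappers
+	# can consult it and skip runs while no feed needs updating yet.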
+ $newfile=~s/\.new$/time/;
+ open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
+ if (defined $timestamp) {
+ print OUT $timestamp."\n";
+ }
+ close OUT || error("save $newfile: $!", $cleanup);
+}
+
+sub garbage_collect () {
foreach my $name (keys %feeds) {
# remove any feeds that were not seen while building the pages
# that used to contain them
foreach my $guid (values %guids) {
# any guid whose feed is gone should be removed
if (! exists $feeds{$guid->{feed}}) {
- unlink "$config{srcdir}/".htmlfn($guid->{page})
- if exists $guid->{page};
+ if (exists $guid->{page}) {
+ unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
+ || unlink "$config{srcdir}/".htmlfn($guid->{page});
+ }
delete $guids{$guid->{guid}};
}
# handle expired guids
elsif ($guid->{expired} && exists $guid->{page}) {
unlink "$config{srcdir}/".htmlfn($guid->{page});
+ unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
delete $guid->{page};
delete $guid->{md5};
}
}
-} #}}}
+}
-sub mergestate () { #{{{
+sub mergestate () {
# Load the current state in from disk, and merge into it
# values from the state in memory that might have changed
# during aggregation.
	my %myfeeds=%feeds;
	my %myguids=%guids;
	clearstate();
	loadstate();

	# All that can change in feed state during aggregation is a few
	# fields.
foreach my $name (keys %myfeeds) {
if (exists $feeds{$name}) {
- foreach my $field (qw{message lastupdate numposts
- newposts error}) {
+ foreach my $field (qw{message lastupdate lasttry
+ numposts newposts error}) {
$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
}
}
}
# New guids can be created during aggregation.
+ # Guids have a few fields that may be updated during aggregation.
# It's also possible that guids were removed from the on-disk state
# while the aggregation was in process. That would only happen if
	# their feed was also removed, so any removed guids added back here
	# will be garbage collected later.
	foreach my $guid (keys %myguids) {
if (! exists $guids{$guid}) {
$guids{$guid}=$myguids{$guid};
}
+ else {
+ foreach my $field (qw{md5}) {
+ $guids{$guid}->{$field}=$myguids{$guid}->{$field};
+ }
+ }
}
-} #}}}
+}
-sub clearstate () { #{{{
+sub clearstate () {
%feeds=();
%guids=();
$state_loaded=0;
-} #}}}
+}
-sub expire () { #{{{
+sub expire () {
foreach my $feed (values %feeds) {
next unless $feed->{expireage} || $feed->{expirecount};
my $count=0;
}
}
}
-} #}}}
+}
-sub needsaggregate () { #{{{
+sub needsaggregate () {
return values %feeds if $config{rebuild};
return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
-} #}}}
+}
-sub aggregate (@) { #{{{
+sub aggregate (@) {
+ eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
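+	# (when the module is installed, LWP gains the ability to fetch
+	# feeds over IPv6; aggregation still works without it)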
eval q{use XML::Feed};
error($@) if $@;
eval q{use URI::Fetch};
error($@) if $@;
foreach my $feed (@_) {
- $feed->{lastupdate}=time;
+ $feed->{lasttry}=time;
$feed->{newposts}=0;
- $feed->{message}=sprintf(gettext("processed ok at %s"),
- displaytime($feed->{lastupdate}));
+ $feed->{message}=sprintf(gettext("last checked %s"),
+ displaytime($feed->{lasttry}));
$feed->{error}=0;
debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
		if (! length $feed->{feedurl}) {
			my @urls=XML::Feed->find_feeds($feed->{url});
			if (! @urls) {
				$feed->{message}=sprintf(gettext("could not find feed at %s"), $feed->{url});
				$feed->{error}=1;
				debug($feed->{message});
				next;
			}
$feed->{feedurl}=pop @urls;
}
- my $res=URI::Fetch->fetch($feed->{feedurl});
+ # Using the for_url parameter makes sure we crash if used
+ # with an older IkiWiki.pm that didn't automatically try
+ # to use LWPx::ParanoidAgent.
+ my $ua=useragent(for_url => $feed->{feedurl});
+ my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
if (! $res) {
$feed->{message}=URI::Fetch->errstr;
$feed->{error}=1;
debug($feed->{message});
next;
}
+
+ # lastupdate is only set if we were able to contact the server
+ $feed->{lastupdate}=$feed->{lasttry};
+
if ($res->status == URI::Fetch::URI_GONE()) {
$feed->{message}=gettext("feed not found");
$feed->{error}=1;
next;
}
my $content=$res->content;
+
+ # This is a hack to support the media:content extension
+ # to RSS. XML::Feed does not support it, but it's the same
+ # as an enclosure, so converting it to that tag will let it
+ # parse.
+ $content=~s/<media:content/<enclosure/g;
+ $content=~s/<\/media:content/<\/enclosure/g;
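+	# For example, <media:content url="..." type="image/jpeg"/> is
+	# rewritten to <enclosure url="..." type="image/jpeg"/>.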
+
my $f=eval{XML::Feed->parse(\$content)};
if ($@) {
# One common cause of XML::Feed crashing is a feed
# that contains invalid UTF-8 sequences. Convert
# feed to ascii to try to work around.
$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
- $content=Encode::decode_utf8($content, 0);
- $f=eval{XML::Feed->parse(\$content)};
+ $f=eval {
+ $content=Encode::decode_utf8($content, 0);
+ XML::Feed->parse(\$content)
+ };
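+		# (decode_utf8 itself dies if handed a string that already
+		# contains wide characters, so it must run inside the eval too)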
}
if ($@) {
# Another possibility is badly escaped entities.
$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
$content=~s/\&(?!amp)(\w+);/&$1;/g;
- $content=Encode::decode_utf8($content, 0);
- $f=eval{XML::Feed->parse(\$content)};
+ $f=eval {
+ $content=Encode::decode_utf8($content, 0);
+ XML::Feed->parse(\$content)
+ };
}
if ($@) {
- $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+ # gettext can clobber $@
+ my $error = $@;
+ $feed->{message}=gettext("feed crashed XML::Feed!")." ($error)";
$feed->{error}=1;
debug($feed->{message});
next;
}
foreach my $entry ($f->entries) {
- my $content=$content=$entry->content->body;
+ # XML::Feed doesn't work around XML::Atom's bizarre
+ # API, so we will. Real unicode strings? Yes please.
+ # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+ no warnings 'once';
+ local $XML::Atom::ForceUnicode = 1;
+ use warnings;
+
+ my $c=$entry->content;
# atom feeds may have no content, only a summary
- if (! defined $content && ref $entry->summary) {
- $content=$entry->summary->body;
+ if (! defined $c && ref $entry->summary) {
+ $c=$entry->summary;
}
add_page(
feed => $feed,
copyright => $f->copyright,
title => defined $entry->title ? decode_entities($entry->title) : "untitled",
+ author => defined $entry->author ? decode_entities($entry->author) : "",
link => $entry->link,
- content => defined $content ? $content : "",
+ enclosureurl => defined $entry->enclosure ? $entry->enclosure->url : "",
+ enclosureimage => (defined $entry->enclosure && $entry->enclosure->type =~ m/image\//) ? "1" : "",
+ enclosureaudio => (defined $entry->enclosure && $entry->enclosure->type =~ m/audio\//) ? "1" : "",
+ enclosurevideo => (defined $entry->enclosure && $entry->enclosure->type =~ m/video\//) ? "1" : "",
+ content => (defined $c && defined $c->body) ? $c->body : "",
guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
+ base => (defined $c && $c->can("base")) ? $c->base : undef,
);
}
}
-} #}}}
+}
-sub add_page (@) { #{{{
+sub add_page (@) {
my %params=@_;
my $feed=$params{feed};
	my $guid={};
	my $mtime=undef;
	if (exists $guids{$params{guid}}) {
		# updating an existing post
$guid=$guids{$params{guid}};
return if $guid->{expired};
+ write_page($feed, $guid, $mtime, \%params);
}
else {
		# new post
		$guid->{guid}=$params{guid};
		$guids{$params{guid}}=$guid;
		$mtime=$params{ctime};
		$feed->{numposts}++;
		$feed->{newposts}++;

		# assign it an unused page
		my $page=titlepage($params{title});
# escape slashes and periods in title so it doesn't specify
# directory name or trigger ".." disallowing code.
$page=~s!([/.])!"__".ord($1)."__"!eg;
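		# For example, a title of "a/b.c" yields the page name
		# "a__47__b__46__c" (47 and 46 being ord("/") and ord(".")).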
- $page=$feed->{dir}."/".$page;
- ($page)=$page=~/$config{wiki_file_regexp}/;
if (! defined $page || ! length $page) {
$page=$feed->{dir}."/item";
}
+ $page=$feed->{dir}."/".$page;
+ ($page)=$page=~/$config{wiki_file_regexp}/;
my $c="";
while (exists $IkiWiki::pagecase{lc $page.$c} ||
+ -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
-e "$config{srcdir}/".htmlfn($page.$c)) {
$c++
}
+ $page=$page.$c;
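+		# ($c starts out empty, so the bare name is tried first,
+		# then "name1", "name2", ... until an unused page is found)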
- # Make sure that the file name isn't too long.
- # NB: This doesn't check for path length limits.
- my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
- if (defined $max && length(htmlfn($page)) >= $max) {
+ $guid->{page}=$page;
+ eval { write_page($feed, $guid, $mtime, \%params) };
+ if ($@) {
+ # assume failure was due to a too long filename
$c="";
$page=$feed->{dir}."/item";
while (exists $IkiWiki::pagecase{lc $page.$c} ||
- -e "$config{srcdir}/".htmlfn($page.$c)) {
+ -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
+ -e "$config{srcdir}/".htmlfn($page.$c)) {
$c++
}
+ $page=$page.$c;
+
+ $guid->{page}=$page;
+ write_page($feed, $guid, $mtime, \%params);
}
- $guid->{page}=$page;
debug(sprintf(gettext("creating new page %s"), $page));
}
+}
+
+sub write_page ($$$$) {
+ my $feed=shift;
+ my $guid=shift;
+ my $mtime=shift;
+ my %params=%{shift()};
+
$guid->{feed}=$feed->{name};
	# To write or not to write? Need to avoid writing unchanged pages
	# to avoid unnecessary rebuilding. The mtime from rss cannot be
	# trusted; let's use a digest.
	eval q{use Digest::MD5 'md5_hex'};
	error($@) if $@;
	require Encode;
	my $digest=md5_hex(Encode::encode_utf8($params{content}));
	return unless ! exists $guid->{md5} || $guid->{md5} ne $digest || $config{rebuild};
$guid->{md5}=$digest;
# Create the page.
- my $template=template($feed->{template}, blind_cache => 1);
+ my $template;
+ eval {
+ $template=template($feed->{template}, blind_cache => 1);
+ };
+ if ($@) {
+ # gettext can clobber $@
+ my $error = $@;
+ print STDERR gettext("failed to process template:")." $error";
+ return;
+ }
$template->param(title => $params{title})
if defined $params{title} && length($params{title});
- $template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
+	$template->param(author => $params{author})
+		if defined $params{author} && length($params{author})
+			&& $params{author} ne $feed->{name};
+ $template->param(content => wikiescape(htmlabs($params{content},
+ defined $params{base} ? $params{base} : $feed->{feedurl})));
$template->param(name => $feed->{name});
$template->param(url => $feed->{url});
$template->param(copyright => $params{copyright})
if defined $params{copyright} && length $params{copyright};
- $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
+ $template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
if defined $params{link};
+ $template->param(enclosureurl => $params{enclosureurl})
+ if defined $params{enclosureurl} && length $params{enclosureurl};
+ $template->param(enclosureimage => $params{enclosureimage})
+ if defined $params{enclosureimage} && length $params{enclosureimage};
+ $template->param(enclosureaudio => $params{enclosureaudio})
+ if defined $params{enclosureaudio} && length $params{enclosureaudio};
+ $template->param(enclosurevideo => $params{enclosurevideo})
+ if defined $params{enclosurevideo} && length $params{enclosurevideo};
if (ref $feed->{tags}) {
$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
}
- writefile(htmlfn($guid->{page}), $config{srcdir},
- $template->output);
+ writefile(htmlfn($guid->{page}),
+ $IkiWiki::Plugin::transient::transientdir, $template->output);
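+	# Aggregated pages are written to the transient underlay rather
+	# than to srcdir, so they stay out of revision control and can
+	# be regenerated at will.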
if (defined $mtime && $mtime <= time) {
# Set the mtime, this lets the build process get the right
# creation time on record for the new page.
- utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
+ utime $mtime, $mtime,
+ $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
# Store it in pagectime for expiry code to use also.
- $IkiWiki::pagectime{$guid->{page}}=$mtime;
+ $IkiWiki::pagectime{$guid->{page}}=$mtime
+ unless exists $IkiWiki::pagectime{$guid->{page}};
}
else {
# Dummy value for expiry code.
- $IkiWiki::pagectime{$guid->{page}}=time;
+ $IkiWiki::pagectime{$guid->{page}}=time
+ unless exists $IkiWiki::pagectime{$guid->{page}};
}
-} #}}}
+}
-sub htmlescape ($) { #{{{
+sub wikiescape ($) {
# escape accidental wikilinks and preprocessor stuff
- my $html=shift;
- $html=~s/(?<!\\)\[\[/\\\[\[/g;
- return $html;
-} #}}}
-
-sub urlabs ($$) { #{{{
- my $url=shift;
- my $urlbase=shift;
-
- URI->new_abs($url, $urlbase)->as_string;
-} #}}}
+ return encode_entities(shift, '\[\]');
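+	# For example, "[[link]]" becomes "&#x5B;&#x5B;link&#x5D;&#x5D;",
+	# which renders the same but can no longer trigger a directive.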
+}
-sub htmlabs ($$) { #{{{
+sub htmlabs ($$) {
# Convert links in html from relative to absolute.
# Note that this is a heuristic, which is not specified by the rss
	# spec and may not be right for all feeds. Also, see Debian
	# bug #381359.
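	# For example, with an urlbase of http://example.com/blog/index.rss,
	# href="/img/a.png" is rewritten to href="http://example.com/img/a.png".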
	my $html=shift;
	my $urlbase=shift;

	my $ret="";
	my $p = HTML::Parser->new(api_version => 3);
	$p->handler(default => sub { $ret.=join("", @_) }, "text");
	$p->handler(start => sub {
		my ($tagname, $pos, $text) = @_;
		if (ref $HTML::Tagset::linkElements{$tagname}) {
			while (4 <= @$pos) {
				# use attribute sets from right to left
				# to avoid invalidating the offsets
				# when replacing the values
				my ($k_offset, $k_len, $v_offset, $v_len) =
					splice(@$pos, -4);
				my $attrname = lc(substr($text, $k_offset, $k_len));
				next unless grep { $_ eq $attrname } @{$HTML::Tagset::linkElements{$tagname}};
				next unless $v_offset; # 0 v_offset means no value
my $v = substr($text, $v_offset, $v_len);
$v =~ s/^([\'\"])(.*)\1$/$2/;
- my $new_v=urlabs($v, $urlbase);
+ my $new_v=IkiWiki::urlabs($v, $urlbase);
$new_v =~ s/\"/"/g; # since we quote with ""
substr($text, $v_offset, $v_len) = qq("$new_v");
			}
		}
		$ret.=$text;
	}, "tagname, tokenpos, text");
	$p->parse($html);
$p->eof;
return $ret;
-} #}}}
+}
-sub htmlfn ($) { #{{{
+sub htmlfn ($) {
return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
-} #}}}
+}
my $aggregatelock;
-sub lockaggregate () { #{{{
+sub lockaggregate () {
# Take an exclusive lock to prevent multiple concurrent aggregators.
	# Returns true if the lock was acquired.
	if (! -d $config{wikistatedir}) {
		mkdir($config{wikistatedir});
	}
	open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
		error("cannot open to $config{wikistatedir}/aggregatelock: $!");
	if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
		close($aggregatelock) || error("failed closing aggregatelock: $!");
		return 0;
	}
return 1;
-} #}}}
+}
-sub unlockaggregate () { #{{{
+sub unlockaggregate () {
return close($aggregatelock) if $aggregatelock;
return;
-} #}}}
+}
1