safe => 1,
rebuild => 0,
},
- cookiejar => {
- type => "string",
- example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
- safe => 0, # hooks into perl module internals
- description => "cookie control",
- },
}
sub checkconfig () {
if (! defined $config{aggregateinternal}) {
$config{aggregateinternal}=1;
}
- if (! defined $config{cookiejar}) {
- $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
- }
# This is done here rather than in a refresh hook because it
# needs to run before the wiki is locked.
my @feeds=needsaggregate();
return unless @feeds;
if (! lockaggregate()) {
- debug("an aggregation process is already running");
- return;
+ error("an aggregation process is already running");
}
# force a later rebuild of source pages
$IkiWiki::forcerebuild{$_->{sourcepage}}=1
}
$feed->{feedurl}=pop @urls;
}
- my $res=URI::Fetch->fetch($feed->{feedurl},
- UserAgent => LWP::UserAgent->new(
- cookie_jar => $config{cookiejar},
- ),
- );
+ # Using the for_url parameter makes sure we crash if used
+ # with an older IkiWiki.pm that didn't automatically try
+ # to use LWPx::ParanoidAgent.
+ my $ua=useragent(for_url => $feed->{feedurl});
+ my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
if (! $res) {
$feed->{message}=URI::Fetch->errstr;
$feed->{error}=1;
next;
}
my $content=$res->content;
+
+ # This is a hack to support the media:content extension
+ # to RSS. XML::Feed does not support it, but it's the same
+ # as an enclosure, so converting it to that tag will let it
+ # parse.
+ $content=~s/<media:content/<enclosure/g;
+ $content=~s/<\/media:content/<\/enclosure/g;
+
my $f=eval{XML::Feed->parse(\$content)};
if ($@) {
# One common cause of XML::Feed crashing is a feed
};
}
if ($@) {
- $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+ # gettext can clobber $@
+ my $error = $@;
+ $feed->{message}=gettext("feed crashed XML::Feed!")." ($error)";
$feed->{error}=1;
debug($feed->{message});
next;
# XML::Feed doesn't work around XML::Atom's bizarre
# API, so we will. Real unicode strings? Yes please.
# See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+ no warnings 'once';
local $XML::Atom::ForceUnicode = 1;
+ use warnings;
my $c=$entry->content;
# atom feeds may have no content, only a summary
feed => $feed,
copyright => $f->copyright,
title => defined $entry->title ? decode_entities($entry->title) : "untitled",
+ author => defined $entry->author ? decode_entities($entry->author) : "",
link => $entry->link,
+ enclosureurl => defined $entry->enclosure ? $entry->enclosure->url : "",
+ enclosureimage => (defined $entry->enclosure && $entry->enclosure->type =~ m/image\//) ? "1" : "",
+ enclosureaudio => (defined $entry->enclosure && $entry->enclosure->type =~ m/audio\//) ? "1" : "",
+ enclosurevideo => (defined $entry->enclosure && $entry->enclosure->type =~ m/video\//) ? "1" : "",
content => (defined $c && defined $c->body) ? $c->body : "",
guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
# escape slashes and periods in title so it doesn't specify
# directory name or trigger ".." disallowing code.
$page=~s!([/.])!"__".ord($1)."__"!eg;
- $page=$feed->{dir}."/".$page;
- ($page)=$page=~/$config{wiki_file_regexp}/;
if (! defined $page || ! length $page) {
$page=$feed->{dir}."/item";
}
+ $page=$feed->{dir}."/".$page;
+ ($page)=$page=~/$config{wiki_file_regexp}/;
my $c="";
while (exists $IkiWiki::pagecase{lc $page.$c} ||
-e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
-e "$config{srcdir}/".htmlfn($page.$c)) {
$c++
}
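+ # Append the uniquifying suffix found by the loop above, so it
+ # actually becomes part of the final page name.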
+ $page=$page.$c;
$guid->{page}=$page;
eval { write_page($feed, $guid, $mtime, \%params) };
if ($@) {
# assume failure was due to a too long filename
- # (or o
$c="";
$page=$feed->{dir}."/item";
while (exists $IkiWiki::pagecase{lc $page.$c} ||
-e "$config{srcdir}/".htmlfn($page.$c)) {
$c++
}
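+ # Append the suffix to the shortened fallback name as well.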
+ $page=$page.$c;
$guid->{page}=$page;
write_page($feed, $guid, $mtime, \%params);
$template=template($feed->{template}, blind_cache => 1);
};
if ($@) {
- print STDERR gettext("failed to process template:")." $@";
+ # gettext can clobber $@
+ my $error = $@;
+ print STDERR gettext("failed to process template:")." $error";
return;
}
$template->param(title => $params{title})
if defined $params{title} && length($params{title});
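+ # Skip the author when it merely repeats the feed name.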
+ $template->param(author => $params{author})
+ if defined $params{author} && length $params{author}
+ && $params{author} ne $feed->{name};
$template->param(content => wikiescape(htmlabs($params{content},
defined $params{base} ? $params{base} : $feed->{feedurl})));
$template->param(name => $feed->{name});
if defined $params{copyright} && length $params{copyright};
$template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
if defined $params{link};
+ $template->param(enclosureurl => $params{enclosureurl})
+ if defined $params{enclosureurl} && length $params{enclosureurl};
+ $template->param(enclosureimage => $params{enclosureimage})
+ if defined $params{enclosureimage} && length $params{enclosureimage};
+ $template->param(enclosureaudio => $params{enclosureaudio})
+ if defined $params{enclosureaudio} && length $params{enclosureaudio};
+ $template->param(enclosurevideo => $params{enclosurevideo})
+ if defined $params{enclosurevideo} && length $params{enclosurevideo};
if (ref $feed->{tags}) {
$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
}