X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/5674e7fc1273800554c23ad6194e8a06dee851ae..09e7c1ad99367eb00d56cfc1e6c64e9b0e361dc4:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 83bd670cb..fbf88c627 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -58,21 +58,12 @@ sub getsetup () {
 			safe => 1,
 			rebuild => 0,
 		},
-		cookiejar => {
-			type => "string",
-			example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
-			safe => 0, # hooks into perl module internals
-			description => "cookie control",
-		},
 }
 
 sub checkconfig () {
 	if (! defined $config{aggregateinternal}) {
 		$config{aggregateinternal}=1;
 	}
-	if (! defined $config{cookiejar}) {
-		$config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
-	}
 
 	# This is done here rather than in a refresh hook because it
 	# needs to run before the wiki is locked.
@@ -113,8 +104,7 @@ sub launchaggregation () {
 	my @feeds=needsaggregate();
 	return unless @feeds;
 	if (! lockaggregate()) {
-		debug("an aggregation process is already running");
-		return;
+		error("an aggregation process is already running");
 	}
 	# force a later rebuild of source pages
 	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
@@ -523,11 +513,8 @@ sub aggregate (@) {
 			}
 			$feed->{feedurl}=pop @urls;
 		}
-		my $res=URI::Fetch->fetch($feed->{feedurl},
-			UserAgent => LWP::UserAgent->new(
-				cookie_jar => $config{cookiejar},
-			),
-		);
+		my $ua=useragent();
+		my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
 		if (! $res) {
 			$feed->{message}=URI::Fetch->errstr;
 			$feed->{error}=1;
@@ -566,7 +553,9 @@ sub aggregate (@) {
 			};
 		}
 		if ($@) {
-			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+			# gettext can clobber $@
+			my $error = $@;
+			$feed->{message}=gettext("feed crashed XML::Feed!")." ($error)";
 			$feed->{error}=1;
 			debug($feed->{message});
 			next;
@@ -594,6 +583,7 @@ sub aggregate (@) {
 				feed => $feed,
 				copyright => $f->copyright,
 				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
+				author => defined $entry->author ? decode_entities($entry->author) : "",
 				link => $entry->link,
 				content => (defined $c && defined $c->body) ? $c->body : "",
 				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
@@ -640,12 +630,12 @@ sub add_page (@) {
 		       -e "$config{srcdir}/".htmlfn($page.$c)) {
 			$c++
 		}
+		$page=$page.$c;
 
 		$guid->{page}=$page;
 		eval { write_page($feed, $guid, $mtime, \%params) };
 		if ($@) {
 			# assume failure was due to a too long filename
-			# (or o
 			$c="";
 			$page=$feed->{dir}."/item";
 			while (exists $IkiWiki::pagecase{lc $page.$c} ||
@@ -653,6 +643,7 @@ sub add_page (@) {
 			       -e "$config{srcdir}/".htmlfn($page.$c)) {
 				$c++
 			}
+			$page=$page.$c;
 
 			$guid->{page}=$page;
 			write_page($feed, $guid, $mtime, \%params);
@@ -686,11 +677,16 @@ sub write_page ($$$$$) {
 		$template=template($feed->{template}, blind_cache => 1);
 	};
 	if ($@) {
-		print STDERR gettext("failed to process template:")." $@";
+		# gettext can clobber $@
+		my $error = $@;
+		print STDERR gettext("failed to process template:")." $error";
 		return;
 	}
 	$template->param(title => $params{title})
 		if defined $params{title} && length($params{title});
+	$template->param(author => $params{author})
+		if defined $params{author} && length($params{author})
+			&& $params{author} ne $feed->{name};
 	$template->param(content => wikiescape(htmlabs($params{content},
 		defined $params{base} ? $params{base} : $feed->{feedurl})));
 	$template->param(name => $feed->{name});
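
The two "gettext can clobber $@" hunks above share one pitfall worth spelling out: in Perl, $@ is a global that is reset by every eval, including any eval that runs inside a function called while the error message is being built. Below is a minimal standalone sketch of that hazard; it is not ikiwiki code, and fake_gettext() is a hypothetical stand-in for the gettext() that the patch comments say can clobber $@.

	#!/usr/bin/perl
	# Sketch only: shows why the patch copies $@ into a lexical
	# before calling gettext(). fake_gettext() is made up.
	use strict;
	use warnings;

	sub fake_gettext {
		my $msgid=shift;
		eval { 1 };	# any successful eval resets $@ to ""
		return $msgid;
	}

	eval { die "feed parser exploded\n" };
	if ($@) {
		# What the patch does: save $@ before anything else can
		# run an eval.
		my $error=$@;
		chomp $error;	# just to keep the demo output on one line
		print fake_gettext("feed crashed XML::Feed!")." ($error)\n";
		# Interpolating $@ directly at this point would lose the
		# message, because fake_gettext() already reset it:
		print "raw \$\@ is now: '$@'\n";
	}

Running the sketch prints "feed crashed XML::Feed! (feed parser exploded)" followed by "raw $@ is now: ''", which is exactly the failure mode the copied-to-$error pattern in both hunks avoids.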