X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/101321752362ba3117909fb5913edc2d396f9133..8d4342183b1c3a96797def6fff96feebacb90db6:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 614c3fa55..be7da3a71 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -58,22 +58,15 @@ sub getsetup () {
 		safe => 1,
 		rebuild => 0,
 	},
-	cookiejar => {
-		type => "string",
-		example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
-		safe => 0, # hooks into perl module internals
-		description => "cookie control",
-	},
 }
 
 sub checkconfig () {
 	if (! defined $config{aggregateinternal}) {
 		$config{aggregateinternal}=1;
 	}
-	if (! defined $config{cookiejar}) {
-		$config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
-	}
 
+	# This is done here rather than in a refresh hook because it
+	# needs to run before the wiki is locked.
 	if ($config{aggregate} && ! ($config{post_commit} &&
 	                             IkiWiki::commit_hook_enabled())) {
 		launchaggregation();
@@ -111,8 +104,7 @@ sub launchaggregation () {
 	my @feeds=needsaggregate();
 	return unless @feeds;
 	if (! lockaggregate()) {
-		debug("an aggregation process is already running");
-		return;
+		error("an aggregation process is already running");
 	}
 	# force a later rebuild of source pages
 	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
@@ -199,7 +191,7 @@ sub migrate_to_internal {
 	if (-e $oldoutput) {
 		require IkiWiki::Render;
 		debug("removing output file $oldoutput");
-		IkiWiki::prune($oldoutput);
+		IkiWiki::prune($oldoutput, $config{destdir});
 	}
 }
 
@@ -496,6 +488,7 @@ sub needsaggregate () {
 }
 
 sub aggregate (@) {
+	eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
 	eval q{use XML::Feed};
 	error($@) if $@;
 	eval q{use URI::Fetch};
@@ -520,11 +513,8 @@
 			}
 			$feed->{feedurl}=pop @urls;
 		}
-		my $res=URI::Fetch->fetch($feed->{feedurl},
-			UserAgent => LWP::UserAgent->new(
-				cookie_jar => $config{cookiejar},
-			),
-		);
+		my $ua=useragent();
+		my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
 		if (! $res) {
 			$feed->{message}=URI::Fetch->errstr;
 			$feed->{error}=1;
@@ -563,7 +553,9 @@
 			};
 		}
 		if ($@) {
-			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+			# gettext can clobber $@
+			my $error = $@;
+			$feed->{message}=gettext("feed crashed XML::Feed!")." ($error)";
 			$feed->{error}=1;
 			debug($feed->{message});
 			next;
@@ -579,7 +571,9 @@
 		# XML::Feed doesn't work around XML::Atom's bizarre
 		# API, so we will. Real unicode strings? Yes please.
 		# See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+		no warnings 'once';
 		local $XML::Atom::ForceUnicode = 1;
+		use warnings;
 
 		my $c=$entry->content;
 		# atom feeds may have no content, only a summary
@@ -591,6 +585,7 @@
 			feed => $feed,
 			copyright => $f->copyright,
 			title => defined $entry->title ? decode_entities($entry->title) : "untitled",
+			author => defined $entry->author ? decode_entities($entry->author) : "",
 			link => $entry->link,
 			content => (defined $c && defined $c->body) ? $c->body : "",
 			guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
@@ -611,6 +606,7 @@ sub add_page (@) {
 		# updating an existing post
 		$guid=$guids{$params{guid}};
 		return if $guid->{expired};
+		write_page($feed, $guid, $mtime, \%params);
 	}
 	else {
 		# new post
@@ -636,24 +632,35 @@
 		       -e "$config{srcdir}/".htmlfn($page.$c)) {
 			$c++
 		}
+		$page=$page.$c;
 
-		# Make sure that the file name isn't too long.
-		# NB: This doesn't check for path length limits.
-		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-		if (defined $max && length(htmlfn($page)) >= $max) {
+		$guid->{page}=$page;
+		eval { write_page($feed, $guid, $mtime, \%params) };
+		if ($@) {
+			# assume failure was due to a too long filename
 			$c="";
 			$page=$feed->{dir}."/item";
 			while (exists $IkiWiki::pagecase{lc $page.$c} ||
 			       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
-
-			       -e "$config{srcdir}/".htmlfn($page.$c)) {
+			       -e "$config{srcdir}/".htmlfn($page.$c)) {
 				$c++
 			}
+			$page=$page.$c;
+
+			$guid->{page}=$page;
+			write_page($feed, $guid, $mtime, \%params);
 		}
 
-		$guid->{page}=$page;
 		debug(sprintf(gettext("creating new page %s"), $page));
 	}
+}
+
+sub write_page ($$$$) {
+	my $feed=shift;
+	my $guid=shift;
+	my $mtime=shift;
+	my %params=%{shift()};
+
 	$guid->{feed}=$feed->{name};
 
 	# To write or not to write? Need to avoid writing unchanged pages
@@ -672,11 +679,16 @@ sub add_page (@) {
 		$template=template($feed->{template}, blind_cache => 1);
 	};
 	if ($@) {
-		print STDERR gettext("failed to process template:")." $@";
+		# gettext can clobber $@
+		my $error = $@;
+		print STDERR gettext("failed to process template:")." $error";
 		return;
 	}
 	$template->param(title => $params{title})
 		if defined $params{title} && length($params{title});
+	$template->param(author => $params{author})
+		if defined $params{author} && length($params{author})
+			&& $params{author} ne $feed->{name};
 	$template->param(content => wikiescape(htmlabs($params{content},
 		defined $params{base} ? $params{base} : $feed->{feedurl})));
 	$template->param(name => $feed->{name});
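
Both "gettext can clobber $@" hunks above fix the same Perl pitfall: any code that runs between an eval and the inspection of $@ may itself call eval internally and reset $@, and gettext() can do exactly that. A minimal standalone sketch of the pattern (might_clobber() is a hypothetical stand-in for gettext(), not part of ikiwiki):

#!/usr/bin/perl
use strict;
use warnings;

# Stand-in for gettext(): its internal eval resets the global $@.
sub might_clobber {
	eval { 1 };
	return "translated: $_[0]";
}

eval { die "feed crashed\n" };

# Wrong: by the time might_clobber() returns, $@ may be empty.
#my $msg=might_clobber("feed crashed XML::Feed!")." ($@)";

# Right: copy $@ into a lexical before calling anything else,
# which is what the hunks above do with "my $error = $@;".
my $error=$@;
my $msg=might_clobber("feed crashed XML::Feed!")." ($error)";
print $msg;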