diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 5e967356025ce0e87bcb4444f7b39cf24dbaaa8c..be7da3a710ec66a6243f414d9de62a49580bf6ad 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -16,7 +16,8 @@ my %guids;
 sub import {
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "getsetup", id => "aggregate", call => \&getsetup);
-       hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+       hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
+               last => 1);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
@@ -57,22 +58,15 @@ sub getsetup () {
                        safe => 1,
                        rebuild => 0,
                },
-               cookiejar => {
-                       type => "string",
-                       example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
-                       safe => 0, # hooks into perl module internals
-                       description => "cookie control",
-               },
 }
 
 sub checkconfig () {
        if (! defined $config{aggregateinternal}) {
                $config{aggregateinternal}=1;
        }
-       if (! defined $config{cookiejar}) {
-               $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
-       }
 
+       # This is done here rather than in a refresh hook because it
+       # needs to run before the wiki is locked.
        if ($config{aggregate} && ! ($config{post_commit} && 
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
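
Dropping the cookiejar setup here does not remove cookie support: the cookie jar becomes a core setting, consumed by the shared useragent() helper that the fetch code below switches to. The new comment also records why aggregation is launched from checkconfig rather than a refresh hook: launchaggregation() has to run before ikiwiki takes the wiki lock.
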
@@ -110,8 +104,7 @@ sub launchaggregation () {
        my @feeds=needsaggregate();
        return unless @feeds;
        if (! lockaggregate()) {
-               debug("an aggregation process is already running");
-               return;
+               error("an aggregation process is already running");
        }
        # force a later rebuild of source pages
        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
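
Turning the lock failure from debug() into error() is a behaviour change worth noting: error() dies with the message, so a second aggregation run now fails loudly with a nonzero exit status instead of quietly returning.
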
@@ -198,7 +191,7 @@ sub migrate_to_internal {
                if (-e $oldoutput) {
                        require IkiWiki::Render;
                        debug("removing output file $oldoutput");
-                       IkiWiki::prune($oldoutput);
+                       IkiWiki::prune($oldoutput, $config{destdir});
                }
        }
        
@@ -495,6 +488,7 @@ sub needsaggregate () {
 }
 
 sub aggregate (@) {
+       eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
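
Net::INET6Glue::INET_is_INET6 transparently swaps IO::Socket::INET for an IPv6-capable implementation, so the LWP-based fetching below can reach IPv6 feeds with no further changes. Unlike the loads that follow, its eval is deliberately left unchecked: the module is an optional dependency, and aggregation simply remains IPv4-only when it is absent.
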
@@ -519,11 +513,8 @@ sub aggregate (@) {
                        }
                        $feed->{feedurl}=pop @urls;
                }
-               my $res=URI::Fetch->fetch($feed->{feedurl},
-                       UserAgent => LWP::UserAgent->new(
-                               cookie_jar => $config{cookiejar},
-                       ),
-               );
+               my $ua=useragent();
+               my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
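
The hand-rolled LWP::UserAgent is replaced by ikiwiki's shared useragent() helper, giving every plugin the same fetch behaviour. A minimal sketch of what that helper amounts to, assuming it keeps the cookie-jar support removed from this plugin and honours proxy environment variables:

    sub useragent () {
            return LWP::UserAgent->new(
                    cookie_jar => $config{cookiejar},  # cookie control now lives in core
                    env_proxy => 1,                    # honour http_proxy & friends
            );
    }
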
@@ -562,7 +553,9 @@ sub aggregate (@) {
                        };
                }
                if ($@) {
-                       $feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+                       # gettext can clobber $@
+                       my $error = $@;
+                       $feed->{message}=gettext("feed crashed XML::Feed!")." ($error)";
                        $feed->{error}=1;
                        debug($feed->{message});
                        next;
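
Copying $@ before calling gettext() avoids a classic Perl trap: any call that runs an eval internally resets $@, so reading it afterwards reports an empty error. The pattern in isolation (risky_thing() is illustrative, not ikiwiki code):

    eval { risky_thing() };
    if ($@) {
            my $error = $@;   # copy immediately; later calls may reset $@
            warn "it failed: $error";
    }
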
@@ -578,7 +571,9 @@ sub aggregate (@) {
                        # XML::Feed doesn't work around XML::Atom's bizarre
                        # API, so we will. Real unicode strings? Yes please.
                        # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+                       no warnings 'once';
                        local $XML::Atom::ForceUnicode = 1;
+                       use warnings;
 
                        my $c=$entry->content;
                        # atom feeds may have no content, only a summary
@@ -590,6 +585,7 @@ sub aggregate (@) {
                                feed => $feed,
                                copyright => $f->copyright,
                                title => defined $entry->title ? decode_entities($entry->title) : "untitled",
+                               author => defined $entry->author ? decode_entities($entry->author) : "",
                                link => $entry->link,
                                content => (defined $c && defined $c->body) ? $c->body : "",
                                guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
@@ -610,6 +606,7 @@ sub add_page (@) {
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
+               write_page($feed, $guid, $mtime, \%params);
        }
        else {
                # new post
@@ -635,24 +632,35 @@ sub add_page (@) {
                       -e "$config{srcdir}/".htmlfn($page.$c)) {
                        $c++
                }
+               $page=$page.$c;
 
-               # Make sure that the file name isn't too long. 
-               # NB: This doesn't check for path length limits.
-               my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-               if (defined $max && length(htmlfn($page)) >= $max) {
+               $guid->{page}=$page;
+               eval { write_page($feed, $guid, $mtime, \%params) };
+               if ($@) {
+                       # assume failure was due to a too long filename
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
                              -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
-
-                              -e "$config{srcdir}/".htmlfn($page.$c)) {
+                             -e "$config{srcdir}/".htmlfn($page.$c)) {
                                $c++
                        }
+                       $page=$page.$c;
+
+                       $guid->{page}=$page;
+                       write_page($feed, $guid, $mtime, \%params);
                }
 
-               $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
+}
+
+sub write_page ($$$$) {
+       my $feed=shift;
+       my $guid=shift;
+       my $mtime=shift;
+       my %params=%{shift()};
+
        $guid->{feed}=$feed->{name};
        
        # To write or not to write? Need to avoid writing unchanged pages
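
The restructured naming logic replaces the old up-front POSIX::pathconf(_PC_NAME_MAX) check, which by its own admission could not catch path length limits, with a try-and-fall-back approach: add_page() first attempts write_page() using the title-derived page name, and if that dies it assumes an over-long filename and retries once with the short "item$c" form; a second failure is left to propagate. The page-writing code itself is factored out into the new write_page() helper so that both the update path and the new-post path can call it.
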
@@ -671,11 +679,16 @@ sub add_page (@) {
                $template=template($feed->{template}, blind_cache => 1);
        };
        if ($@) {
-               print STDERR gettext("failed to process template:")." $@";
+               # gettext can clobber $@
+               my $error = $@;
+               print STDERR gettext("failed to process template:")." $error";
                return;
        }
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
+       $template->param(author => $params{author})
+               if defined $params{author} && length($params{author})
+                       && $params{author} ne $feed->{name};
        $template->param(content => wikiescape(htmlabs($params{content},
                defined $params{base} ? $params{base} : $feed->{feedurl})));
        $template->param(name => $feed->{name});
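
With author now passed through, only when non-empty and different from the feed's name so that bylines stay informative, a template can render it conditionally. An illustrative, self-contained HTML::Template fragment, not ikiwiki's shipped aggregatepost template:

    use HTML::Template;
    my $t = HTML::Template->new(scalarref =>
            \q{<TMPL_IF AUTHOR>by <TMPL_VAR AUTHOR>, </TMPL_IF><TMPL_VAR TITLE>});
    $t->param(author => "Alice", title => "An aggregated post");
    print $t->output;   # prints "by Alice, An aggregated post"
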