Make enclosure follow WikiLink LinkingRules.
diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index c667ee2a9f7429512d6287fdd91bd6422cd1500e..89da5c453aa7e36143ad46271d053f446215b15d 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -8,7 +8,6 @@ use IkiWiki 3.00;
 use HTML::Parser;
 use HTML::Tagset;
 use HTML::Entities;
-use URI;
 use open qw{:utf8 :std};
 
 my %feeds;
@@ -17,7 +16,8 @@ my %guids;
 sub import {
        hook(type => "getopt", id => "aggregate", call => \&getopt);
        hook(type => "getsetup", id => "aggregate", call => \&getsetup);
-       hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+       hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
+               last => 1);
        hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
        hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
@@ -58,13 +58,24 @@ sub getsetup () {
                        safe => 1,
                        rebuild => 0,
                },
+               cookiejar => {
+                       type => "string",
+                       example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
+                       safe => 0, # hooks into perl module internals
+                       description => "cookie control",
+               },
 }
 
 sub checkconfig () {
        if (! defined $config{aggregateinternal}) {
                $config{aggregateinternal}=1;
        }
+       if (! defined $config{cookiejar}) {
+               $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
+       }
 
+       # This is done here rather than in a refresh hook because it
+       # needs to run before the wiki is locked.
        if ($config{aggregate} && ! ($config{post_commit} && 
                                     IkiWiki::commit_hook_enabled())) {
                launchaggregation();
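
The new cookiejar option defaults to ~/.ikiwiki/cookies, but can be overridden in the setup file; the hash is handed through to the HTTP cookie jar when feeds are fetched. A minimal sketch of such an override (the path is hypothetical):

	# in an ikiwiki setup file
	cookiejar => {
		file => "/srv/ikiwiki/state/cookies",
	},
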
@@ -102,8 +113,7 @@ sub launchaggregation () {
        my @feeds=needsaggregate();
        return unless @feeds;
        if (! lockaggregate()) {
-               debug("an aggregation process is already running");
-               return;
+               error("an aggregation process is already running");
        }
        # force a later rebuild of source pages
        $IkiWiki::forcerebuild{$_->{sourcepage}}=1
@@ -163,10 +173,14 @@ sub migrate_to_internal {
                
                $config{aggregateinternal} = 0;
                my $oldname = "$config{srcdir}/".htmlfn($data->{page});
+               if (! -e $oldname) {
+                       $oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
+               }
+
                my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
                
                $config{aggregateinternal} = 1;
-               my $newname = "$config{srcdir}/".htmlfn($data->{page});
+               my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
                
                debug "moving $oldname -> $newname";
                if (-e $newname) {
@@ -186,7 +200,7 @@ sub migrate_to_internal {
                if (-e $oldoutput) {
                        require IkiWiki::Render;
                        debug("removing output file $oldoutput");
-                       IkiWiki::prune($oldoutput);
+                       IkiWiki::prune($oldoutput, $config{destdir});
                }
        }
        
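
IkiWiki::prune() now takes a second argument naming the directory at which upward removal of empty parent directories must stop, so deleting an output file cannot prune above the destdir. A sketch with a hypothetical page path:

	# removes the file, then any parent directories left empty,
	# but never climbs past $config{destdir}
	IkiWiki::prune($config{destdir}."/feeds/item/index.html", $config{destdir});
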
@@ -210,6 +224,8 @@ sub needsbuild (@) {
                        markunseen($feed->{sourcepage});
                }
        }
+
+       return $needsbuild;
 }
 
 sub preprocess (@) {
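
needsbuild hooks are now expected to hand back the (possibly modified) list of files they were passed, hence the added return. A minimal conforming hook, as a sketch:

	sub needsbuild (@) {
		my $needsbuild=shift;	# array reference of files to be rebuilt
		# ...inspect or filter @$needsbuild here...
		return $needsbuild;
	}
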
@@ -298,7 +314,7 @@ sub loadstate () {
        return if $state_loaded;
        $state_loaded=1;
        if (-e "$config{wikistatedir}/aggregate") {
-               open(IN, "$config{wikistatedir}/aggregate") ||
+               open(IN, "<", "$config{wikistatedir}/aggregate") ||
                        die "$config{wikistatedir}/aggregate: $!";
                while (<IN>) {
                        $_=IkiWiki::possibly_foolish_untaint($_);
@@ -335,7 +351,7 @@ sub savestate () {
        garbage_collect();
        my $newfile="$config{wikistatedir}/aggregate.new";
        my $cleanup = sub { unlink($newfile) };
-       open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+       open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
        foreach my $data (values %feeds, values %guids) {
                my @line;
                foreach my $field (keys %$data) {
@@ -356,6 +372,20 @@ sub savestate () {
        close OUT || error("save $newfile: $!", $cleanup);
        rename($newfile, "$config{wikistatedir}/aggregate") ||
                error("rename $newfile: $!", $cleanup);
+
+       my $timestamp=undef;
+       foreach my $feed (keys %feeds) {
+               my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
+               if (! defined $timestamp || $timestamp > $t) {
+                       $timestamp=$t;
+               }
+       }
+       $newfile=~s/\.new$/time/;
+       open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
+       if (defined $timestamp) {
+               print OUT $timestamp."\n";
+       }
+       close OUT || error("save $newfile: $!", $cleanup);
 }
 
 sub garbage_collect () {
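
savestate() now also records, in an "aggregatetime" file, the earliest moment any feed becomes due again (the minimum of lastupdate + updateinterval across feeds). A wrapper run from cron could consult it before paying the cost of a full aggregation pass; a hedged sketch, not code from ikiwiki itself:

	if (open(my $in, "<", "$config{wikistatedir}/aggregatetime")) {
		my $due=<$in>;
		close $in;
		chomp $due if defined $due;
		# only aggregate once the earliest feed is actually due;
		# $setupfile (the wiki's setup file path) is hypothetical
		if (defined $due && $due =~ /^\d+$/ && time >= $due) {
			system("ikiwiki", "--setup", $setupfile,
				"--aggregate", "--refresh");
		}
	}
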
@@ -370,13 +400,16 @@ sub garbage_collect () {
        foreach my $guid (values %guids) {
                # any guid whose feed is gone should be removed
                if (! exists $feeds{$guid->{feed}}) {
-                       unlink "$config{srcdir}/".htmlfn($guid->{page})
-                               if exists $guid->{page};
+                       if (exists $guid->{page}) {
+                               unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
+                                       || unlink "$config{srcdir}/".htmlfn($guid->{page});
+                       }
                        delete $guids{$guid->{guid}};
                }
                # handle expired guids
                elsif ($guid->{expired} && exists $guid->{page}) {
                        unlink "$config{srcdir}/".htmlfn($guid->{page});
+                       unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                        delete $guid->{page};
                        delete $guid->{md5};
                }
@@ -404,6 +437,7 @@ sub mergestate () {
        }
 
        # New guids can be created during aggregation.
+       # Guids have a few fields that may be updated during aggregation.
        # It's also possible that guids were removed from the on-disk state
        # while the aggregation was in process. That would only happen if
        # their feed was also removed, so any removed guids added back here
@@ -412,6 +446,11 @@ sub mergestate () {
                if (! exists $guids{$guid}) {
                        $guids{$guid}=$myguids{$guid};
                }
+               else {
+                       foreach my $field (qw{md5}) {
+                               $guids{$guid}->{$field}=$myguids{$guid}->{$field};
+                       }
+               }
        }
 }
 
@@ -458,6 +497,7 @@ sub needsaggregate () {
 }
 
 sub aggregate (@) {
+       eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
        eval q{use XML::Feed};
        error($@) if $@;
        eval q{use URI::Fetch};
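
The string eval makes Net::INET6Glue a soft dependency: if the module is installed it transparently makes LWP's connections IPv6-capable, and if not the eval fails without killing aggregation, unlike the loads that follow, where a missing module is fatal. The two flavours of the idiom side by side, as a sketch (module names hypothetical):

	eval q{use Some::Optional::Module};
	# $@ is simply ignored here: absence is tolerated
	eval q{use Some::Required::Module};
	error($@) if $@;	# absence is fatal
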
@@ -482,7 +522,11 @@ sub aggregate (@) {
                        }
                        $feed->{feedurl}=pop @urls;
                }
-               my $res=URI::Fetch->fetch($feed->{feedurl});
+               my $res=URI::Fetch->fetch($feed->{feedurl},
+                       UserAgent => LWP::UserAgent->new(
+                               cookie_jar => $config{cookiejar},
+                       ),
+               );
                if (! $res) {
                        $feed->{message}=URI::Fetch->errstr;
                        $feed->{error}=1;
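
Passing a plain hash reference as cookie_jar works because LWP::UserAgent accepts either an HTTP::Cookies object or a hash reference of constructor options for one, so the setup-file value is usable directly. The equivalence, as a sketch:

	use LWP::UserAgent;
	use HTTP::Cookies;

	# these two user agents end up with equivalent cookie jars
	my $ua1 = LWP::UserAgent->new(cookie_jar => { file => "/tmp/cookies" });
	my $ua2 = LWP::UserAgent->new(
		cookie_jar => HTTP::Cookies->new(file => "/tmp/cookies"),
	);
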
@@ -534,6 +578,11 @@ sub aggregate (@) {
                }
 
                foreach my $entry ($f->entries) {
+                       # XML::Feed doesn't work around XML::Atom's bizarre
+                       # API, so we will. Real unicode strings? Yes please.
+                       # See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+                       local $XML::Atom::ForceUnicode = 1;
+
                        my $c=$entry->content;
                        # atom feeds may have no content, only a summary
                        if (! defined $c && ref $entry->summary) {
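
local gives the package global a temporary value that Perl restores automatically when the enclosing block is left, so the workaround only affects XML::Atom calls made while this entry is processed. The scoping behaviour in miniature, a sketch:

	our $flag = 0;
	{
		local $flag = 1;	# old value saved, new value visible
		# ...anything called from here observes $flag == 1...
	}
	# $flag is 0 again here
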
@@ -564,6 +613,7 @@ sub add_page (@) {
                # updating an existing post
                $guid=$guids{$params{guid}};
                return if $guid->{expired};
+               write_page($feed, $guid, $mtime, \%params);
        }
        else {
                # new post
@@ -585,25 +635,37 @@
                }
                my $c="";
                while (exists $IkiWiki::pagecase{lc $page.$c} ||
+                      -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
                       -e "$config{srcdir}/".htmlfn($page.$c)) {
                        $c++
                }
 
-               # Make sure that the file name isn't too long. 
-               # NB: This doesn't check for path length limits.
-               my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-               if (defined $max && length(htmlfn($page)) >= $max) {
+               $guid->{page}=$page;
+               eval { write_page($feed, $guid, $mtime, \%params) };
+               if ($@) {
+                       # assume failure was due to a too long filename
                        $c="";
                        $page=$feed->{dir}."/item";
                        while (exists $IkiWiki::pagecase{lc $page.$c} ||
-                              -e "$config{srcdir}/".htmlfn($page.$c)) {
+                             -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
+                             -e "$config{srcdir}/".htmlfn($page.$c)) {
                                $c++
                        }
+
+                       $guid->{page}=$page;
+                       write_page($feed, $guid, $mtime, \%params);
                }
 
-               $guid->{page}=$page;
                debug(sprintf(gettext("creating new page %s"), $page));
        }
+}
+
+sub write_page ($$$$) {
+       my $feed=shift;
+       my $guid=shift;
+       my $mtime=shift;
+       my %params=%{shift()};
+
        $guid->{feed}=$feed->{name};
        
        # To write or not to write? Need to avoid writing unchanged pages
@@ -617,7 +680,14 @@ sub add_page (@) {
        $guid->{md5}=$digest;
 
        # Create the page.
-       my $template=template($feed->{template}, blind_cache => 1);
+       my $template;
+       eval {
+               $template=template($feed->{template}, blind_cache => 1);
+       };
+       if ($@) {
+               print STDERR gettext("failed to process template:")." $@";
+               return;
+       }
        $template->param(title => $params{title})
                if defined $params{title} && length($params{title});
        $template->param(content => wikiescape(htmlabs($params{content},
@@ -626,24 +696,27 @@ sub add_page (@) {
        $template->param(url => $feed->{url});
        $template->param(copyright => $params{copyright})
                if defined $params{copyright} && length $params{copyright};
-       $template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
+       $template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
                if defined $params{link};
        if (ref $feed->{tags}) {
                $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
        }
-       writefile(htmlfn($guid->{page}), $config{srcdir},
-               $template->output);
+       writefile(htmlfn($guid->{page}),
+               $IkiWiki::Plugin::transient::transientdir, $template->output);
 
        if (defined $mtime && $mtime <= time) {
                # Set the mtime, this lets the build process get the right
                # creation time on record for the new page.
-               utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
+               utime $mtime, $mtime,
+                       $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
                # Store it in pagectime for expiry code to use also.
-               $IkiWiki::pagectime{$guid->{page}}=$mtime;
+               $IkiWiki::pagectime{$guid->{page}}=$mtime
+                       unless exists $IkiWiki::pagectime{$guid->{page}};
        }
        else {
                # Dummy value for expiry code.
-               $IkiWiki::pagectime{$guid->{page}}=time;
+               $IkiWiki::pagectime{$guid->{page}}=time
+                       unless exists $IkiWiki::pagectime{$guid->{page}};
        }
 }
 
@@ -652,13 +725,6 @@ sub wikiescape ($) {
        return encode_entities(shift, '\[\]');
 }
 
-sub urlabs ($$) {
-       my $url=shift;
-       my $urlbase=shift;
-
-       URI->new_abs($url, $urlbase)->as_string;
-}
-
 sub htmlabs ($$) {
        # Convert links in html from relative to absolute.
        # Note that this is a heuristic, which is not specified by the rss
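
The plugin's private urlabs() is dropped in favour of IkiWiki::urlabs() in the core, which wraps the same URI call. Resolving a relative link against the feed URL looks like this (a sketch with example URLs):

	use URI;
	URI->new_abs("../style.css", "http://example.com/blog/index.rss")->as_string;
	# => "http://example.com/style.css"
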
@@ -684,7 +750,7 @@ sub htmlabs ($$) {
                                next unless $v_offset; # 0 v_offset means no value
                                my $v = substr($text, $v_offset, $v_len);
                                $v =~ s/^([\'\"])(.*)\1$/$2/;
-                               my $new_v=urlabs($v, $urlbase);
+                               my $new_v=IkiWiki::urlabs($v, $urlbase);
                                $new_v =~ s/\"/&quot;/g; # since we quote with ""
                                substr($text, $v_offset, $v_len) = qq("$new_v");
                        }