diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index c667ee2a9..4a704617e 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -8,7 +8,6 @@ use IkiWiki 3.00;
 use HTML::Parser;
 use HTML::Tagset;
 use HTML::Entities;
-use URI;
 use open qw{:utf8 :std};
 
 my %feeds;
@@ -17,7 +16,8 @@ my %guids;
 sub import {
 	hook(type => "getopt", id => "aggregate", call => \&getopt);
 	hook(type => "getsetup", id => "aggregate", call => \&getsetup);
-	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
+		last => 1);
 	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
 	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
@@ -58,13 +58,24 @@ sub getsetup () {
 			safe => 1,
 			rebuild => 0,
 		},
+		cookiejar => {
+			type => "string",
+			example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
+			safe => 0, # hooks into perl module internals
+			description => "cookie control",
+		},
 }
 
 sub checkconfig () {
 	if (! defined $config{aggregateinternal}) {
 		$config{aggregateinternal}=1;
 	}
+	if (! defined $config{cookiejar}) {
+		$config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
+	}
 
+	# This is done here rather than in a refresh hook because it
+	# needs to run before the wiki is locked.
 	if ($config{aggregate} && ! ($config{post_commit} && 
 	                             IkiWiki::commit_hook_enabled())) {
 		launchaggregation();
@@ -163,10 +174,14 @@ sub migrate_to_internal {
 		
 		$config{aggregateinternal} = 0;
 		my $oldname = "$config{srcdir}/".htmlfn($data->{page});
+		if (! -e $oldname) {
+			$oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
+		}
+
 		my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
 		
 		$config{aggregateinternal} = 1;
-		my $newname = "$config{srcdir}/".htmlfn($data->{page});
+		my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
 		
 		debug "moving $oldname -> $newname";
 		if (-e $newname) {
@@ -210,6 +225,8 @@ sub needsbuild (@) {
 			markunseen($feed->{sourcepage});
 		}
 	}
+
+	return $needsbuild;
 }
 
 sub preprocess (@) {
@@ -298,7 +315,7 @@ sub loadstate () {
 	return if $state_loaded;
 	$state_loaded=1;
 	if (-e "$config{wikistatedir}/aggregate") {
-		open(IN, "$config{wikistatedir}/aggregate") ||
+		open(IN, "<", "$config{wikistatedir}/aggregate") ||
 			die "$config{wikistatedir}/aggregate: $!";
 		while (<IN>) {
 			$_=IkiWiki::possibly_foolish_untaint($_);
@@ -335,7 +352,7 @@ sub savestate () {
 	garbage_collect();
 	my $newfile="$config{wikistatedir}/aggregate.new";
 	my $cleanup = sub { unlink($newfile) };
-	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+	open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
 		my @line;
 		foreach my $field (keys %$data) {
@@ -356,6 +373,20 @@ sub savestate () {
 	close OUT || error("save $newfile: $!", $cleanup);
 	rename($newfile, "$config{wikistatedir}/aggregate") ||
 		error("rename $newfile: $!", $cleanup);
+
+	my $timestamp=undef;
+	foreach my $feed (keys %feeds) {
+		my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
+		if (! defined $timestamp || $timestamp > $t) {
+			$timestamp=$t;
+		}
+	}
+	$newfile=~s/\.new$/time/;
+	open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
+	if (defined $timestamp) {
+		print OUT $timestamp."\n";
+	}
+	close OUT || error("save $newfile: $!", $cleanup);
 }
 
 sub garbage_collect () {
@@ -370,13 +401,16 @@ sub garbage_collect () {
 	foreach my $guid (values %guids) {
 		# any guid whose feed is gone should be removed
 		if (! exists $feeds{$guid->{feed}}) {
-			unlink "$config{srcdir}/".htmlfn($guid->{page})
-				if exists $guid->{page};
+			if (exists $guid->{page}) {
+				unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
+					|| unlink "$config{srcdir}/".htmlfn($guid->{page});
+			}
 			delete $guids{$guid->{guid}};
 		}
 		# handle expired guids
 		elsif ($guid->{expired} && exists $guid->{page}) {
 			unlink "$config{srcdir}/".htmlfn($guid->{page});
+			unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
 			delete $guid->{page};
 			delete $guid->{md5};
 		}
@@ -404,6 +438,7 @@ sub mergestate () {
 	}
 
 	# New guids can be created during aggregation.
+	# Guids have a few fields that may be updated during aggregation.
 	# It's also possible that guids were removed from the on-disk state
 	# while the aggregation was in process. That would only happen if
 	# their feed was also removed, so any removed guids added back here
@@ -412,6 +447,11 @@ sub mergestate () {
 		if (! exists $guids{$guid}) {
 			$guids{$guid}=$myguids{$guid};
 		}
+		else {
+			foreach my $field (qw{md5}) {
+				$guids{$guid}->{$field}=$myguids{$guid}->{$field};
+			}
+		}
 	}
 }
 
@@ -458,6 +498,7 @@ sub needsaggregate () {
 }
 
 sub aggregate (@) {
+	eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
 	eval q{use XML::Feed};
 	error($@) if $@;
 	eval q{use URI::Fetch};
@@ -482,7 +523,11 @@ sub aggregate (@) {
 			}
 			$feed->{feedurl}=pop @urls;
 		}
-		my $res=URI::Fetch->fetch($feed->{feedurl});
+		my $res=URI::Fetch->fetch($feed->{feedurl},
+			UserAgent => LWP::UserAgent->new(
+				cookie_jar => $config{cookiejar},
+			),
+		);
 		if (! $res) {
 			$feed->{message}=URI::Fetch->errstr;
 			$feed->{error}=1;
@@ -534,6 +579,11 @@ sub aggregate (@) {
 		}
 
 		foreach my $entry ($f->entries) {
+			# XML::Feed doesn't work around XML::Atom's bizarre
+			# API, so we will. Real unicode strings? Yes please.
+			# See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+			local $XML::Atom::ForceUnicode = 1;
+
 			my $c=$entry->content;
 			# atom feeds may have no content, only a summary
 			if (! defined $c && ref $entry->summary) {
@@ -585,6 +635,7 @@ sub add_page (@) {
 		}
 		my $c="";
 		while (exists $IkiWiki::pagecase{lc $page.$c} ||
+		       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
 		       -e "$config{srcdir}/".htmlfn($page.$c)) {
 			$c++
 		}
@@ -596,6 +647,8 @@ sub add_page (@) {
 			$c="";
 			$page=$feed->{dir}."/item";
 			while (exists $IkiWiki::pagecase{lc $page.$c} ||
+			      -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
+
 			       -e "$config{srcdir}/".htmlfn($page.$c)) {
 				$c++
 			}
@@ -617,7 +670,14 @@ sub add_page (@) {
 	$guid->{md5}=$digest;
 
 	# Create the page.
-	my $template=template($feed->{template}, blind_cache => 1);
+	my $template;
+	eval {
+		$template=template($feed->{template}, blind_cache => 1);
+	};
+	if ($@) {
+		print STDERR gettext("failed to process template:")." $@";
+		return;
+	}
 	$template->param(title => $params{title})
 		if defined $params{title} && length($params{title});
 	$template->param(content => wikiescape(htmlabs($params{content},
@@ -626,24 +686,27 @@ sub add_page (@) {
 	$template->param(url => $feed->{url});
 	$template->param(copyright => $params{copyright})
 		if defined $params{copyright} && length $params{copyright};
-	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
+	$template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
 		if defined $params{link};
 	if (ref $feed->{tags}) {
 		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
 	}
-	writefile(htmlfn($guid->{page}), $config{srcdir},
-		$template->output);
+	writefile(htmlfn($guid->{page}),
+		$IkiWiki::Plugin::transient::transientdir, $template->output);
 
 	if (defined $mtime && $mtime <= time) {
 		# Set the mtime, this lets the build process get the right
 		# creation time on record for the new page.
-		utime $mtime, $mtime, "$config{srcdir}/".htmlfn($guid->{page});
+		utime $mtime, $mtime,
+			$IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
 		# Store it in pagectime for expiry code to use also.
-		$IkiWiki::pagectime{$guid->{page}}=$mtime;
+		$IkiWiki::pagectime{$guid->{page}}=$mtime
+			unless exists $IkiWiki::pagectime{$guid->{page}};
 	}
 	else {
 		# Dummy value for expiry code.
-		$IkiWiki::pagectime{$guid->{page}}=time;
+		$IkiWiki::pagectime{$guid->{page}}=time
+			unless exists $IkiWiki::pagectime{$guid->{page}};
 	}
 }
 
@@ -652,13 +715,6 @@ sub wikiescape ($) {
 	return encode_entities(shift, '\[\]');
 }
 
-sub urlabs ($$) {
-	my $url=shift;
-	my $urlbase=shift;
-
-	URI->new_abs($url, $urlbase)->as_string;
-}
-
 sub htmlabs ($$) {
 	# Convert links in html from relative to absolute.
 	# Note that this is a heuristic, which is not specified by the rss
@@ -684,7 +740,7 @@ sub htmlabs ($$) {
 				next unless $v_offset; # 0 v_offset means no value
 				my $v = substr($text, $v_offset, $v_len);
 				$v =~ s/^([\'\"])(.*)\1$/$2/;
-				my $new_v=urlabs($v, $urlbase);
+				my $new_v=IkiWiki::urlabs($v, $urlbase);
 				$new_v =~ s/\"/&quot;/g; # since we quote with ""
 				substr($text, $v_offset, $v_len) = qq("$new_v");
 			}