diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 7d4565342..5e22609c9 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -16,7 +16,8 @@ my %guids;
 sub import {
 	hook(type => "getopt", id => "aggregate", call => \&getopt);
 	hook(type => "getsetup", id => "aggregate", call => \&getsetup);
-	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
+		last => 1);
 	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
 	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
@@ -57,13 +58,24 @@ sub getsetup () {
 			safe => 1,
 			rebuild => 0,
 		},
+		cookiejar => {
+			type => "string",
+			example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
+			safe => 0, # hashref is fed into LWP module internals
+			description => "cookie jar for aggregating feeds",
+		},
 }
 
 sub checkconfig () {
 	if (! defined $config{aggregateinternal}) {
 		$config{aggregateinternal}=1;
 	}
+	if (! defined $config{cookiejar}) {
+		$config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
+	}
 
+	# This is done here rather than in a refresh hook because it
+	# needs to run before the wiki is locked.
 	if ($config{aggregate} && ! ($config{post_commit} && 
 	                             IkiWiki::commit_hook_enabled())) {
 		launchaggregation();
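
With the default above, cookiejar only needs to be set explicitly when
feeds require cookies kept somewhere else; for example, in the wiki's
.setup file (the path here is illustrative):

	cookiejar => { file => "/srv/wiki/cookies.txt" },
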
@@ -390,8 +402,8 @@ sub garbage_collect () {
 		# any guid whose feed is gone should be removed
 		if (! exists $feeds{$guid->{feed}}) {
 			if (exists $guid->{page}) {
-				unlink "$config{srcdir}/".htmlfn($guid->{page});
-				unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
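+				# unlink returns the number of files removed, so
+				# the srcdir copy is only tried when no transient
+				# copy was deleted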
+				unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
+					|| unlink "$config{srcdir}/".htmlfn($guid->{page});
 			}
 			delete $guids{$guid->{guid}};
 		}
@@ -486,6 +498,7 @@ sub needsaggregate () {
 }
 
 sub aggregate (@) {
+	eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
 	eval q{use XML::Feed};
 	error($@) if $@;
 	eval q{use URI::Fetch};
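
Net::INET6Glue::INET_is_INET6 patches IO::Socket::INET so that code
layered on it, including LWP, transparently supports IPv6. Loading it in
a string eval keeps it optional: unlike the required modules, its $@ is
deliberately ignored. The idiom in isolation (the fallback comment is
illustrative):

	eval q{use Net::INET6Glue::INET_is_INET6};
	if ($@) {
		# module not installed: feeds still fetch, but IPv4-only
	}
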
@@ -510,7 +523,11 @@ sub aggregate (@) {
 			}
 			$feed->{feedurl}=pop @urls;
 		}
-		my $res=URI::Fetch->fetch($feed->{feedurl});
+		my $res=URI::Fetch->fetch($feed->{feedurl},
+			UserAgent => LWP::UserAgent->new(
+				cookie_jar => $config{cookiejar},
+			),
+		);
 		if (! $res) {
 			$feed->{message}=URI::Fetch->errstr;
 			$feed->{error}=1;
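
LWP::UserAgent accepts either an HTTP::Cookies object or a plain hash
reference for cookie_jar; a hashref is promoted internally via
HTTP::Cookies->new(%$hashref), which is what the safe => 0 caveat in
getsetup is about. Written out explicitly, the fetch above is roughly
equivalent to this sketch (the URL is a placeholder):

	use URI::Fetch;
	use LWP::UserAgent;
	use HTTP::Cookies;

	my $jar=HTTP::Cookies->new(file => "$ENV{HOME}/.ikiwiki/cookies");
	my $ua=LWP::UserAgent->new(cookie_jar => $jar);
	my $res=URI::Fetch->fetch("http://example.com/index.rss",
		UserAgent => $ua);
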
@@ -597,6 +614,7 @@ sub add_page (@) {
 		# updating an existing post
 		$guid=$guids{$params{guid}};
 		return if $guid->{expired};
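+		# write the possibly-updated content out immediately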
+		write_page($feed, $guid, $mtime, \%params);
 	}
 	else {
 		# new post
@@ -623,23 +641,33 @@ sub add_page (@) {
 			$c++
 		}
 
-		# Make sure that the file name isn't too long. 
-		# NB: This doesn't check for path length limits.
-		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-		if (defined $max && length(htmlfn($page)) >= $max) {
+		$page.=$c;	# append the collision-avoiding suffix found above
+		$guid->{page}=$page;
+		eval { write_page($feed, $guid, $mtime, \%params) };
+		if ($@) {
+			# assume failure was due to a too long filename;
+			# retry with a short generic page name
 			$c="";
 			$page=$feed->{dir}."/item";
 			while (exists $IkiWiki::pagecase{lc $page.$c} ||
 			      -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
-
-			       -e "$config{srcdir}/".htmlfn($page.$c)) {
+			      -e "$config{srcdir}/".htmlfn($page.$c)) {
 				$c++
 			}
+
+			$page.=$c;	# append the collision-avoiding suffix found above
+			$guid->{page}=$page;
+			write_page($feed, $guid, $mtime, \%params);
 		}
 
-		$guid->{page}=$page;
 		debug(sprintf(gettext("creating new page %s"), $page));
 	}
+}
+
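+# Shared by both branches of add_page: records which feed the guid
+# came from and writes the page out. Dies on failure (e.g. when
+# writefile() cannot create the file), which add_page's eval/retry
+# relies on.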
+sub write_page ($$$$) {
+	my $feed=shift;
+	my $guid=shift;
+	my $mtime=shift;
+	my %params=%{shift()};
+
 	$guid->{feed}=$feed->{name};
 	
 	# To write or not to write? Need to avoid writing unchanged pages