X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/d844a3487ed03fc35eecd89c6924c5e5ad36b4c5..9ea3f9dfe7c0341f4e002b48728b8139293e19d0:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 3e3eb6d93..8f0870e2e 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -513,7 +513,10 @@ sub aggregate (@) {
 			}
 			$feed->{feedurl}=pop @urls;
 		}
-		my $ua=useragent();
+		# Using the for_url parameter makes sure we crash if used
+		# with an older IkiWiki.pm that didn't automatically try
+		# to use LWPx::ParanoidAgent.
+		my $ua=useragent(for_url => $feed->{feedurl});
 		my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
 		if (! $res) {
 			$feed->{message}=URI::Fetch->errstr;
@@ -553,7 +556,9 @@ sub aggregate (@) {
 			};
 		}
 		if ($@) {
-			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
+			# gettext can clobber $@
+			my $error = $@;
+			$feed->{message}=gettext("feed crashed XML::Feed!")." ($error)";
 			$feed->{error}=1;
 			debug($feed->{message});
 			next;
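
Copying $@ before doing anything else is plain defensive Perl: any function called between the failing eval and the use of $@ (here gettext(), which may run an eval of its own) can reset it. A self-contained example of the pattern, with a made-up localized() standing in for gettext():

    use strict;
    use warnings;

    sub localized {
        eval { 1 };       # stand-in for a translation call that runs its own eval
        return $_[0];
    }

    eval { die "bad feed\n" };
    if ($@) {
        my $error = $@;   # copy first, before any other call can clobber it
        warn localized("feed crashed XML::Feed!")." ($error)";
    }
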
@@ -569,7 +574,9 @@ sub aggregate (@) {
 			# XML::Feed doesn't work around XML::Atom's bizarre
 			# API, so we will. Real unicode strings? Yes please.
 			# See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+			no warnings 'once';
 			local $XML::Atom::ForceUnicode = 1;
+			use warnings;
 
 			my $c=$entry->content;
 			# atom feeds may have no content, only a summary
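
$XML::Atom::ForceUnicode is, as far as this file goes, named only once, so Perl's "used only once: possible typo" warning would otherwise fire; the no warnings 'once' / use warnings pair disables just that category for the assignment and then restores the normal warning set. The same idiom, shown with a deliberately made-up package variable:

    use strict;
    use warnings;

    sub render {
        # Pretend a third-party module reads this flag; naming it only once
        # here would normally warn "Name ... used only once: possible typo".
        no warnings 'once';
        local $Hypothetical::Renderer::ForceUnicode = 1;
        use warnings;

        return "rendered";
    }

    print render(), "\n";
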
@@ -617,23 +624,23 @@ sub add_page (@) {
 		# escape slashes and periods in title so it doesn't specify
 		# directory name or trigger ".." disallowing code.
 		$page=~s!([/.])!"__".ord($1)."__"!eg;
-		$page=$feed->{dir}."/".$page;
-		($page)=$page=~/$config{wiki_file_regexp}/;
 		if (! defined $page || ! length $page) {
 			$page=$feed->{dir}."/item";
 		}
+		$page=$feed->{dir}."/".$page;
+		($page)=$page=~/$config{wiki_file_regexp}/;
 		my $c="";
 		while (exists $IkiWiki::pagecase{lc $page.$c} ||
 		       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
 		       -e "$config{srcdir}/".htmlfn($page.$c)) {
 			$c++
 		}
+		$page=$page.$c;
 
 		$guid->{page}=$page;
 		eval { write_page($feed, $guid, $mtime, \%params) };
 		if ($@) {
 			# assume failure was due to a too long filename
-			# (or o
 			$c="";
 			$page=$feed->{dir}."/item";
 			while (exists $IkiWiki::pagecase{lc $page.$c} ||
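
The while loop has always searched for a free suffix, but in the old code the resulting $c was then dropped, so items whose titles collided could land on the same page; appending it ($page=$page.$c) is what this change adds, and the same one-line fix is repeated in the too-long-filename fallback in the next hunk. A tiny standalone sketch of the suffix-counter idea, with a hash standing in for the pagecase/srcdir existence checks:

    use strict;
    use warnings;

    my %existing = map { $_ => 1 } ('feed/title', 'feed/title1');

    sub unused_page {
        my $page = shift;
        my $c = "";
        $c++ while $existing{$page.$c};   # "" -> 1 -> 2 -> ...
        return $page.$c;                  # append the suffix, or collisions are silently ignored
    }

    print unused_page("feed/title"), "\n";   # feed/title2
    print unused_page("feed/other"), "\n";   # feed/other
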
@@ -641,6 +648,7 @@ sub add_page (@) {
 			      -e "$config{srcdir}/".htmlfn($page.$c)) {
 				$c++
 			}
+			$page=$page.$c;
 
 			$guid->{page}=$page;
 			write_page($feed, $guid, $mtime, \%params);
@@ -674,7 +682,9 @@ sub write_page ($$$$$) {
 		$template=template($feed->{template}, blind_cache => 1);
 	};
 	if ($@) {
-		print STDERR gettext("failed to process template:")." $@";
+		# gettext can clobber $@
+		my $error = $@;
+		print STDERR gettext("failed to process template:")." $error";
 		return;
 	}
 	$template->param(title => $params{title})