X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/cddc335b2bd98a302b261200c12d61b05476d727..fe001bd7bf8d16ae998aa66513e3d2276ab9749b:/IkiWiki/Plugin/aggregate.pm

diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm
index 2a4d10411..83bd670cb 100644
--- a/IkiWiki/Plugin/aggregate.pm
+++ b/IkiWiki/Plugin/aggregate.pm
@@ -1,86 +1,240 @@
 #!/usr/bin/perl
-# Blog aggregation plugin.
+# Feed aggregation plugin.
 package IkiWiki::Plugin::aggregate;
 
 use warnings;
 use strict;
-use IkiWiki 2.00;
-use HTML::Entities;
+use IkiWiki 3.00;
 use HTML::Parser;
 use HTML::Tagset;
-use URI;
+use HTML::Entities;
 use open qw{:utf8 :std};
 
 my %feeds;
 my %guids;
 
-sub import { #{{{
+sub import {
 	hook(type => "getopt", id => "aggregate", call => \&getopt);
-	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig);
+	hook(type => "getsetup", id => "aggregate", call => \&getsetup);
+	hook(type => "checkconfig", id => "aggregate", call => \&checkconfig,
+		last => 1);
 	hook(type => "needsbuild", id => "aggregate", call => \&needsbuild);
 	hook(type => "preprocess", id => "aggregate", call => \&preprocess);
         hook(type => "delete", id => "aggregate", call => \&delete);
 	hook(type => "savestate", id => "aggregate", call => \&savestate);
-} # }}}
+	hook(type => "htmlize", id => "_aggregated", call => \&htmlize);
+	if (exists $config{aggregate_webtrigger} && $config{aggregate_webtrigger}) {
+		hook(type => "cgi", id => "aggregate", call => \&cgi);
+	}
+}
 
-sub getopt () { #{{{
+sub getopt () {
         eval q{use Getopt::Long};
 	error($@) if $@;
         Getopt::Long::Configure('pass_through');
-        GetOptions("aggregate" => \$config{aggregate});
-} #}}}
+        GetOptions(
+		"aggregate" => \$config{aggregate},
+		"aggregateinternal!" => \$config{aggregateinternal},
+	);
+}
+
+sub getsetup () {
+	return
+		plugin => {
+			safe => 1,
+			rebuild => undef,
+		},
+		aggregateinternal => {
+			type => "boolean",
+			example => 1,
+			description => "enable aggregation to internal pages?",
+			safe => 0, # enabling needs manual transition
+			rebuild => 0,
+		},
+		aggregate_webtrigger => {
+			type => "boolean",
+			example => 0,
+			description => "allow aggregation to be triggered via the web?",
+			safe => 1,
+			rebuild => 0,
+		},
+		cookiejar => {
+			type => "string",
+			example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
+			safe => 0, # hooks into perl module internals
+			description => "cookie control",
+		},
+}
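
    The options registered in getsetup above correspond to settings in the
    wiki's setup file. A minimal sketch, assuming the classic
    IkiWiki::Setup::Standard format (plugin list, names, and paths here are
    illustrative, not part of this commit):

	use IkiWiki::Setup::Standard {
		add_plugins => [qw{aggregate}],
		aggregateinternal => 1,
		aggregate_webtrigger => 0,
		cookiejar => { file => "$ENV{HOME}/.ikiwiki/cookies" },
	};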
+
+sub checkconfig () {
+	if (! defined $config{aggregateinternal}) {
+		$config{aggregateinternal}=1;
+	}
+	if (! defined $config{cookiejar}) {
+		$config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
+	}
 
-sub checkconfig () { #{{{
+	# This is done here rather than in a refresh hook because it
+	# needs to run before the wiki is locked.
 	if ($config{aggregate} && ! ($config{post_commit} && 
 	                             IkiWiki::commit_hook_enabled())) {
-		if (! IkiWiki::lockwiki(0)) {
-			debug("wiki is locked by another process, not aggregating");
-			exit 1;
-		}
-		
-		# Fork a child process to handle the aggregation.
-		# The parent process will then handle building the result.
-		# This avoids messy code to clear state accumulated while
-		# aggregating.
-		defined(my $pid = fork) or error("Can't fork: $!");
-		if (! $pid) {
-			loadstate();
+		launchaggregation();
+	}
+}
+
+sub cgi ($) {
+	my $cgi=shift;
+
+	if (defined $cgi->param('do') &&
+	    $cgi->param('do') eq "aggregate_webtrigger") {
+		$|=1;
+		print "Content-Type: text/plain\n\n";
+		$config{cgi}=0;
+		$config{verbose}=1;
+		$config{syslog}=0;
+		print gettext("Aggregation triggered via web.")."\n\n";
+		if (launchaggregation()) {
+			IkiWiki::lockwiki();
 			IkiWiki::loadindex();
-			aggregate();
-			expire();
-			savestate();
-			exit 0;
+			require IkiWiki::Render;
+			IkiWiki::refresh();
+			IkiWiki::saveindex();
+		}
+		else {
+			print gettext("Nothing to do right now, all feeds are up-to-date!")."\n";
 		}
-		waitpid($pid,0);
-		if ($?) {
-			error "aggregation failed with code $?";
+		exit 0;
+	}
+}
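
    With aggregate_webtrigger enabled, the cgi hook above lets an external
    scheduler kick off aggregation by fetching the CGI with
    do=aggregate_webtrigger. A sketch using LWP; the wiki URL is an
    assumption:

	use LWP::UserAgent;
	my $ua=LWP::UserAgent->new;
	# hypothetical CGI URL; substitute your wiki's ikiwiki.cgi
	my $res=$ua->get("http://example.com/ikiwiki.cgi?do=aggregate_webtrigger");
	print $res->decoded_content;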
+
+sub launchaggregation () {
+	# See if any feeds need aggregation.
+	loadstate();
+	my @feeds=needsaggregate();
+	return unless @feeds;
+	if (! lockaggregate()) {
+		debug("an aggregation process is already running");
+		return;
+	}
+	# force a later rebuild of source pages
+	$IkiWiki::forcerebuild{$_->{sourcepage}}=1
+		foreach @feeds;
+
+	# Fork a child process to handle the aggregation.
+	# The parent process will then handle building the
+	# result. This avoids messy code to clear state
+	# accumulated while aggregating.
+	defined(my $pid = fork) or error("Can't fork: $!");
+	if (! $pid) {
+		IkiWiki::loadindex();
+		# Aggregation happens without the main wiki lock
+		# being held. This allows editing pages etc while
+		# aggregation is running.
+		aggregate(@feeds);
+
+		IkiWiki::lockwiki;
+		# Merge changes, since aggregation state may have
+		# changed on disk while the aggregation was happening.
+		mergestate();
+		expire();
+		savestate();
+		IkiWiki::unlockwiki;
+		exit 0;
+	}
+	waitpid($pid,0);
+	if ($?) {
+		error "aggregation failed with code $?";
+	}
+
+	clearstate();
+	unlockaggregate();
+
+	return 1;
+}
+
+# Pages with the _aggregated extension contain plain HTML markup; pass them through unchanged.
+sub htmlize (@) {
+	my %params=@_;
+	return $params{content};
+}
+
+# Used by ikiwiki-transition aggregateinternal.
+sub migrate_to_internal {
+	if (! lockaggregate()) {
+		error("an aggregation process is currently running");
+	}
+
+	IkiWiki::lockwiki();
+	loadstate();
+	$config{verbose}=1;
+
+	foreach my $data (values %guids) {
+		next unless $data->{page};
+		next if $data->{expired};
+		
+		$config{aggregateinternal} = 0;
+		my $oldname = "$config{srcdir}/".htmlfn($data->{page});
+		if (! -e $oldname) {
+			$oldname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
 		}
+
+		my $oldoutput = $config{destdir}."/".IkiWiki::htmlpage($data->{page});
 		
-		IkiWiki::unlockwiki();
+		$config{aggregateinternal} = 1;
+		my $newname = $IkiWiki::Plugin::transient::transientdir."/".htmlfn($data->{page});
+		
+		debug "moving $oldname -> $newname";
+		if (-e $newname) {
+			if (-e $oldname) {
+				error("$newname already exists");
+			}
+			else {
+				debug("already renamed to $newname?");
+			}
+		}
+		elsif (-e $oldname) {
+			rename($oldname, $newname) || error("$!");
+		}
+		else {
+			debug("$oldname not found");
+		}
+		if (-e $oldoutput) {
+			require IkiWiki::Render;
+			debug("removing output file $oldoutput");
+			IkiWiki::prune($oldoutput, $config{destdir});
+		}
 	}
-} #}}}
+	
+	savestate();
+	IkiWiki::unlockwiki;
+	
+	unlockaggregate();
+}
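
    migrate_to_internal is not invoked by the plugin itself; it is driven by
    the ikiwiki-transition tool when an existing wiki switches aggregation to
    internal pages, typically by setting aggregateinternal => 1 in the setup
    file and then running ikiwiki-transition aggregateinternal with the setup
    file as its argument.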
 
-sub needsbuild (@) { #{{{
+sub needsbuild (@) {
 	my $needsbuild=shift;
 	
-	loadstate(); # if not already loaded
+	loadstate();
 
 	foreach my $feed (values %feeds) {
 		if (exists $pagesources{$feed->{sourcepage}} && 
 		    grep { $_ eq $pagesources{$feed->{sourcepage}} } @$needsbuild) {
-			# Mark all feeds originating on this page as removable;
-			# preprocess will unmark those that still exist.
-			remove_feeds($feed->{sourcepage});
+			# Mark all feeds originating on this page as 
+			# not yet seen; preprocess will unmark those that
+			# still exist.
+			markunseen($feed->{sourcepage});
 		}
 	}
-} # }}}
 
-sub preprocess (@) { #{{{
+	return $needsbuild;
+}
+
+sub preprocess (@) {
 	my %params=@_;
 
 	foreach my $required (qw{name url}) {
 		if (! exists $params{$required}) {
-			return "[[aggregate ".sprintf(gettext("missing %s parameter"), $required)."]]";
+			error sprintf(gettext("missing %s parameter"), $required)
 		}
 	}
 
@@ -95,7 +249,7 @@ sub preprocess (@) { #{{{
 	$feed->{name}=$name;
 	$feed->{sourcepage}=$params{page};
 	$feed->{url}=$params{url};
-	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".IkiWiki::titlepage($params{name});
+	my $dir=exists $params{dir} ? $params{dir} : $params{page}."/".titlepage($params{name});
 	$dir=~s/^\/+//;
 	($dir)=$dir=~/$config{wiki_file_regexp}/;
 	$feed->{dir}=$dir;
@@ -103,9 +257,16 @@ sub preprocess (@) { #{{{
 	$feed->{updateinterval}=defined $params{updateinterval} ? $params{updateinterval} * 60 : 15 * 60;
 	$feed->{expireage}=defined $params{expireage} ? $params{expireage} : 0;
 	$feed->{expirecount}=defined $params{expirecount} ? $params{expirecount} : 0;
-	delete $feed->{remove};
-	delete $feed->{expired};
+	if (exists $params{template}) {
+		$params{template}=~s/[^-_a-zA-Z0-9]+//g;
+	}
+	else {
+		$params{template} = "aggregatepost";
+	}
+	$feed->{template}=$params{template} . ".tmpl";
+	delete $feed->{unseen};
 	$feed->{lastupdate}=0 unless defined $feed->{lastupdate};
+	$feed->{lasttry}=$feed->{lastupdate} unless defined $feed->{lasttry};
 	$feed->{numposts}=0 unless defined $feed->{numposts};
 	$feed->{newposts}=0 unless defined $feed->{newposts};
 	$feed->{message}=gettext("new feed") unless defined $feed->{message};
@@ -126,24 +287,35 @@ sub preprocess (@) { #{{{
 	       ($feed->{newposts} ? "; ".$feed->{newposts}.
 	                            " ".gettext("new") : "").
 	       ")";
-} # }}}
+}
 
-sub delete (@) { #{{{
+sub delete (@) {
 	my @files=@_;
 
 	# Remove feed data for removed pages.
 	foreach my $file (@files) {
 		my $page=pagename($file);
-		remove_feeds($page);
+		markunseen($page);
+	}
+}
+
+sub markunseen ($) {
+	my $page=shift;
+
+	foreach my $id (keys %feeds) {
+		if ($feeds{$id}->{sourcepage} eq $page) {
+			$feeds{$id}->{unseen}=1;
+		}
 	}
-} #}}}
+}
 
 my $state_loaded=0;
-sub loadstate () { #{{{
+
+sub loadstate () {
 	return if $state_loaded;
 	$state_loaded=1;
 	if (-e "$config{wikistatedir}/aggregate") {
-		open(IN, "$config{wikistatedir}/aggregate") ||
+		open(IN, "<", "$config{wikistatedir}/aggregate") ||
 			die "$config{wikistatedir}/aggregate: $!";
 		while (<IN>) {
 			$_=IkiWiki::possibly_foolish_untaint($_);
@@ -173,36 +345,15 @@ sub loadstate () { #{{{
 
 		close IN;
 	}
-} #}}}
+}
 
-sub savestate () { #{{{
+sub savestate () {
 	return unless $state_loaded;
-	eval q{use HTML::Entities};
-	error($@) if $@;
+	garbage_collect();
 	my $newfile="$config{wikistatedir}/aggregate.new";
 	my $cleanup = sub { unlink($newfile) };
-	open (OUT, ">$newfile") || error("open $newfile: $!", $cleanup);
+	open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
 	foreach my $data (values %feeds, values %guids) {
-		if ($data->{remove}) {
-			if ($data->{name}) {
-				foreach my $guid (values %guids) {
-					if ($guid->{feed} eq $data->{name}) {
-						$guid->{remove}=1;
-					}
-				}
-			}
-			else {
-				unlink pagefile($data->{page})
-					if exists $data->{page};
-			}
-			next;
-		}
-		elsif ($data->{expired} && exists $data->{page}) {
-			unlink pagefile($data->{page});
-			delete $data->{page};
-			delete $data->{md5};
-		}
-
 		my @line;
 		foreach my $field (keys %$data) {
 			if ($field eq "name" || $field eq "feed" ||
@@ -213,7 +364,8 @@ sub savestate () { #{{{
 				push @line, "tag=$_" foreach @{$data->{tags}};
 			}
 			else {
-				push @line, "$field=".$data->{$field};
+				push @line, "$field=".$data->{$field}
+					if defined $data->{$field};
 			}
 		}
 		print OUT join(" ", @line)."\n" || error("write $newfile: $!", $cleanup);
@@ -221,18 +373,104 @@ sub savestate () { #{{{
 	close OUT || error("save $newfile: $!", $cleanup);
 	rename($newfile, "$config{wikistatedir}/aggregate") ||
 		error("rename $newfile: $!", $cleanup);
-} #}}}
 
-sub expire () { #{{{
+	my $timestamp=undef;
+	foreach my $feed (keys %feeds) {
+		my $t=$feeds{$feed}->{lastupdate}+$feeds{$feed}->{updateinterval};
+		if (! defined $timestamp || $timestamp > $t) {
+			$timestamp=$t;
+		}
+	}
+	$newfile=~s/\.new$/time/;
+	open (OUT, ">", $newfile) || error("open $newfile: $!", $cleanup);
+	if (defined $timestamp) {
+		print OUT $timestamp."\n";
+	}
+	close OUT || error("save $newfile: $!", $cleanup);
+}
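
    The timestamp written to the aggregatetime file above is the epoch time
    at which the next feed becomes due, so an external scheduler can skip
    runs that would do nothing. A sketch of a hypothetical cron wrapper
    (paths and setup file name are assumptions; --aggregate --refresh is the
    documented way to drive this plugin):

	my $timefile="/home/wiki/.ikiwiki/aggregatetime";	# assumed location
	if (open(my $in, "<", $timefile)) {
		chomp(my $next=<$in>);
		close $in;
		if (time >= $next) {
			system("ikiwiki", "--setup", "/home/wiki/wiki.setup",
				"--aggregate", "--refresh");
		}
	}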
+
+sub garbage_collect () {
+	foreach my $name (keys %feeds) {
+		# remove any feeds that were not seen while building the pages
+		# that used to contain them
+		if ($feeds{$name}->{unseen}) {
+			delete $feeds{$name};
+		}
+	}
+
+	foreach my $guid (values %guids) {
+		# any guid whose feed is gone should be removed
+		if (! exists $feeds{$guid->{feed}}) {
+			if (exists $guid->{page}) {
+				unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page})
+					|| unlink "$config{srcdir}/".htmlfn($guid->{page});
+			}
+			delete $guids{$guid->{guid}};
+		}
+		# handle expired guids
+		elsif ($guid->{expired} && exists $guid->{page}) {
+			unlink "$config{srcdir}/".htmlfn($guid->{page});
+			unlink $IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
+			delete $guid->{page};
+			delete $guid->{md5};
+		}
+	}
+}
+
+sub mergestate () {
+	# Load the current state in from disk, and merge into it
+	# values from the state in memory that might have changed
+	# during aggregation.
+	my %myfeeds=%feeds;
+	my %myguids=%guids;
+	clearstate();
+	loadstate();
+
+	# All that can change in feed state during aggregation is a few
+	# fields.
+	foreach my $name (keys %myfeeds) {
+		if (exists $feeds{$name}) {
+			foreach my $field (qw{message lastupdate lasttry
+			                      numposts newposts error}) {
+				$feeds{$name}->{$field}=$myfeeds{$name}->{$field};
+			}
+		}
+	}
+
+	# New guids can be created during aggregation.
+	# Guids have a few fields that may be updated during aggregation.
+	# It's also possible that guids were removed from the on-disk state
+	# while the aggregation was in process. That would only happen if
+	# their feed was also removed, so any removed guids added back here
+	# will be garbage collected later.
+	foreach my $guid (keys %myguids) {
+		if (! exists $guids{$guid}) {
+			$guids{$guid}=$myguids{$guid};
+		}
+		else {
+			foreach my $field (qw{md5}) {
+				$guids{$guid}->{$field}=$myguids{$guid}->{$field};
+			}
+		}
+	}
+}
+
+sub clearstate () {
+	%feeds=();
+	%guids=();
+	$state_loaded=0;
+}
+
+sub expire () {
 	foreach my $feed (values %feeds) {
 		next unless $feed->{expireage} || $feed->{expirecount};
 		my $count=0;
 		my %seen;
-		foreach my $item (sort { $IkiWiki::pagectime{$b->{page}} <=> $IkiWiki::pagectime{$a->{page}} }
-		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} && $IkiWiki::pagectime{$_->{page}} }
+		foreach my $item (sort { ($IkiWiki::pagectime{$b->{page}} || 0) <=> ($IkiWiki::pagectime{$a->{page}} || 0) }
+		                  grep { exists $_->{page} && $_->{feed} eq $feed->{name} }
 		                  values %guids) {
 			if ($feed->{expireage}) {
-				my $days_old = (time - $IkiWiki::pagectime{$item->{page}}) / 60 / 60 / 24;
+				my $days_old = (time - ($IkiWiki::pagectime{$item->{page}} || 0)) / 60 / 60 / 24;
 				if ($days_old > $feed->{expireage}) {
 					debug(sprintf(gettext("expiring %s (%s days old)"),
 						$item->{page}, int($days_old)));
@@ -252,25 +490,26 @@ sub expire () { #{{{
 			}
 		}
 	}
-} #}}}
+}
 
-sub aggregate () { #{{{
+sub needsaggregate () {
+	return values %feeds if $config{rebuild};
+	return grep { time - $_->{lastupdate} >= $_->{updateinterval} } values %feeds;
+}
+
+sub aggregate (@) {
+	eval q{use Net::INET6Glue::INET_is_INET6}; # may not be available
 	eval q{use XML::Feed};
 	error($@) if $@;
 	eval q{use URI::Fetch};
 	error($@) if $@;
-	eval q{use HTML::Entities};
-	error($@) if $@;
 
-	foreach my $feed (values %feeds) {
-		next unless $config{rebuild} || 
-			time - $feed->{lastupdate} >= $feed->{updateinterval};
-		$feed->{lastupdate}=time;
+	foreach my $feed (@_) {
+		$feed->{lasttry}=time;
 		$feed->{newposts}=0;
-		$feed->{message}=sprintf(gettext("processed ok at %s"),
-			displaytime($feed->{lastupdate}));
+		$feed->{message}=sprintf(gettext("last checked %s"),
+			displaytime($feed->{lasttry}));
 		$feed->{error}=0;
-		$IkiWiki::forcerebuild{$feed->{sourcepage}}=1;
 
 		debug(sprintf(gettext("checking feed %s ..."), $feed->{name}));
 
@@ -284,13 +523,21 @@ sub aggregate () { #{{{
 			}
 			$feed->{feedurl}=pop @urls;
 		}
-		my $res=URI::Fetch->fetch($feed->{feedurl});
+		my $res=URI::Fetch->fetch($feed->{feedurl},
+			UserAgent => LWP::UserAgent->new(
+				cookie_jar => $config{cookiejar},
+			),
+		);
 		if (! $res) {
 			$feed->{message}=URI::Fetch->errstr;
 			$feed->{error}=1;
 			debug($feed->{message});
 			next;
 		}
+
+		# lastupdate is only set if we were able to contact the server
+		$feed->{lastupdate}=$feed->{lasttry};
+
 		if ($res->status == URI::Fetch::URI_GONE()) {
 			$feed->{message}=gettext("feed not found");
 			$feed->{error}=1;
@@ -304,15 +551,19 @@ sub aggregate () { #{{{
 			# that contains invalid UTF-8 sequences. Convert
 			# feed to ascii to try to work around.
 			$feed->{message}.=" ".sprintf(gettext("(invalid UTF-8 stripped from feed)"));
-			$content=Encode::decode_utf8($content);
-			$f=eval{XML::Feed->parse(\$content)};
+			$f=eval {
+				$content=Encode::decode_utf8($content, 0);
+				XML::Feed->parse(\$content)
+			};
 		}
 		if ($@) {
 			# Another possibility is badly escaped entities.
 			$feed->{message}.=" ".sprintf(gettext("(feed entities escaped)"));
 			$content=~s/\&(?!amp)(\w+);/&amp;$1;/g;
-			$content=Encode::decode_utf8($content);
-			$f=eval{XML::Feed->parse(\$content)};
+			$f=eval {
+				$content=Encode::decode_utf8($content, 0);
+				XML::Feed->parse(\$content)
+			};
 		}
 		if ($@) {
 			$feed->{message}=gettext("feed crashed XML::Feed!")." ($@)";
@@ -328,20 +579,32 @@ sub aggregate () { #{{{
 		}
 
 		foreach my $entry ($f->entries) {
+			# XML::Feed doesn't work around XML::Atom's bizarre
+			# API, so we will. Real unicode strings? Yes please.
+			# See [[bugs/Aggregated_Atom_feeds_are_double-encoded]]
+			local $XML::Atom::ForceUnicode = 1;
+
+			my $c=$entry->content;
+			# atom feeds may have no content, only a summary
+			if (! defined $c && ref $entry->summary) {
+				$c=$entry->summary;
+			}
+
 			add_page(
 				feed => $feed,
 				copyright => $f->copyright,
 				title => defined $entry->title ? decode_entities($entry->title) : "untitled",
 				link => $entry->link,
-				content => defined $entry->content->body ? $entry->content->body : "",
-				guid => defined $entry->id ? $entry->id : time."_".$feed->name,
+				content => (defined $c && defined $c->body) ? $c->body : "",
+				guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
 				ctime => $entry->issued ? ($entry->issued->epoch || time) : time,
+				base => (defined $c && $c->can("base")) ? $c->base : undef,
 			);
 		}
 	}
-} #}}}
+}
 
-sub add_page (@) { #{{{
+sub add_page (@) {
 	my %params=@_;
 	
 	my $feed=$params{feed};
@@ -351,6 +614,7 @@ sub add_page (@) { #{{{
 		# updating an existing post
 		$guid=$guids{$params{guid}};
 		return if $guid->{expired};
+		write_page($feed, $guid, $mtime, \%params);
 	}
 	else {
 		# new post
@@ -361,7 +625,7 @@ sub add_page (@) { #{{{
 		$feed->{newposts}++;
 
 		# assign it an unused page
-		my $page=IkiWiki::titlepage($params{title});
+		my $page=titlepage($params{title});
 		# escape slashes and periods in title so it doesn't specify
 		# directory name or trigger ".." disallowing code.
 		$page=~s!([/.])!"__".ord($1)."__"!eg;
@@ -372,25 +636,38 @@ sub add_page (@) { #{{{
 		}
 		my $c="";
 		while (exists $IkiWiki::pagecase{lc $page.$c} ||
-		       -e pagefile($page.$c)) {
+		       -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
+		       -e "$config{srcdir}/".htmlfn($page.$c)) {
 			$c++
 		}
 
-		# Make sure that the file name isn't too long. 
-		# NB: This doesn't check for path length limits.
-		my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-		if (defined $max && length(htmlfn($page)) >= $max) {
+		$guid->{page}=$page;
+		eval { write_page($feed, $guid, $mtime, \%params) };
+		if ($@) {
+			# assume failure was due to a too long filename
+			# (or other problem writing the page)
 			$c="";
 			$page=$feed->{dir}."/item";
 			while (exists $IkiWiki::pagecase{lc $page.$c} ||
-			       -e pagefile($page.$c)) {
+			      -e $IkiWiki::Plugin::transient::transientdir."/".htmlfn($page.$c) ||
+			      -e "$config{srcdir}/".htmlfn($page.$c)) {
 				$c++
 			}
+
+			$guid->{page}=$page;
+			write_page($feed, $guid, $mtime, \%params);
 		}
 
-		$guid->{page}=$page;
 		debug(sprintf(gettext("creating new page %s"), $page));
 	}
+}
+
+sub write_page ($$$$) {
+	my $feed=shift;
+	my $guid=shift;
+	my $mtime=shift;
+	my %params=%{shift()};
+
 	$guid->{feed}=$feed->{name};
 	
 	# To write or not to write? Need to avoid writing unchanged pages
@@ -404,43 +681,52 @@ sub add_page (@) { #{{{
 	$guid->{md5}=$digest;
 
 	# Create the page.
-	my $template=template("aggregatepost.tmpl", blind_cache => 1);
+	my $template;
+	eval {
+		$template=template($feed->{template}, blind_cache => 1);
+	};
+	if ($@) {
+		print STDERR gettext("failed to process template:")." $@";
+		return;
+	}
 	$template->param(title => $params{title})
 		if defined $params{title} && length($params{title});
-	$template->param(content => htmlescape(htmlabs($params{content}, $feed->{feedurl})));
+	$template->param(content => wikiescape(htmlabs($params{content},
+		defined $params{base} ? $params{base} : $feed->{feedurl})));
 	$template->param(name => $feed->{name});
 	$template->param(url => $feed->{url});
 	$template->param(copyright => $params{copyright})
 		if defined $params{copyright} && length $params{copyright};
-	$template->param(permalink => urlabs($params{link}, $feed->{feedurl}))
+	$template->param(permalink => IkiWiki::urlabs($params{link}, $feed->{feedurl}))
 		if defined $params{link};
 	if (ref $feed->{tags}) {
 		$template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
 	}
-	writefile(htmlfn($guid->{page}), $config{srcdir},
-		$template->output);
-
-	# Set the mtime, this lets the build process get the right creation
-	# time on record for the new page.
-	utime $mtime, $mtime, pagefile($guid->{page})
-		if defined $mtime && $mtime <= time;
-} #}}}
+	writefile(htmlfn($guid->{page}),
+		$IkiWiki::Plugin::transient::transientdir, $template->output);
+
+	if (defined $mtime && $mtime <= time) {
+		# Set the mtime, this lets the build process get the right
+		# creation time on record for the new page.
+		utime $mtime, $mtime,
+			$IkiWiki::Plugin::transient::transientdir."/".htmlfn($guid->{page});
+		# Store it in pagectime for expiry code to use also.
+		$IkiWiki::pagectime{$guid->{page}}=$mtime
+			unless exists $IkiWiki::pagectime{$guid->{page}};
+	}
+	else {
+		# Dummy value for expiry code.
+		$IkiWiki::pagectime{$guid->{page}}=time
+			unless exists $IkiWiki::pagectime{$guid->{page}};
+	}
+}
 
-sub htmlescape ($) { #{{{
+sub wikiescape ($) {
 	# escape accidental wikilinks and preprocessor stuff
-	my $html=shift;
-	$html=~s/(?<!\\)\[\[/\\\[\[/g;
-	return $html;
-} #}}}
+	return encode_entities(shift, '\[\]');
+}
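
    For illustration, passing '\[\]' as the unsafe-character set makes
    encode_entities replace only square brackets with numeric entities,
    which is enough to keep aggregated HTML from being parsed as wikilinks
    or directives:

	use HTML::Entities;
	print encode_entities('see [[!inline pages="news/*"]]', '\[\]');
	# prints: see &#91;&#91;!inline pages="news/*"&#93;&#93;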
 
-sub urlabs ($$) { #{{{
-	my $url=shift;
-	my $urlbase=shift;
-
-	URI->new_abs($url, $urlbase)->as_string;
-} #}}}
-
-sub htmlabs ($$) { #{{{
+sub htmlabs ($$) {
 	# Convert links in html from relative to absolute.
 	# Note that this is a heuristic, which is not specified by the rss
 	# spec and may not be right for all feeds. Also, see Debian
@@ -465,7 +751,7 @@ sub htmlabs ($$) { #{{{
 				next unless $v_offset; # 0 v_offset means no value
 				my $v = substr($text, $v_offset, $v_len);
 				$v =~ s/^([\'\"])(.*)\1$/$2/;
-				my $new_v=urlabs($v, $urlbase);
+				my $new_v=IkiWiki::urlabs($v, $urlbase);
 				$new_v =~ s/\"/&quot;/g; # since we quote with ""
 				substr($text, $v_offset, $v_len) = qq("$new_v");
 			}
@@ -476,28 +762,32 @@ sub htmlabs ($$) { #{{{
 	$p->eof;
 
 	return $ret;
-} #}}}
+}
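
    The URL resolution itself is delegated to IkiWiki::urlabs which, like
    the local urlabs removed above, is essentially a wrapper around
    URI->new_abs. For example:

	use URI;
	print URI->new_abs("images/photo.png", "http://example.com/blog/feed")->as_string;
	# http://example.com/blog/images/photo.png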
 
-sub remove_feeds () { #{{{
-	my $page=shift;
-
-	my %removed;
-	foreach my $id (keys %feeds) {
-		if ($feeds{$id}->{sourcepage} eq $page) {
-			$feeds{$id}->{remove}=1;
-			$removed{$id}=1;
-		}
-	}
-} #}}}
+sub htmlfn ($) {
+	return shift().".".($config{aggregateinternal} ? "_aggregated" : $config{htmlext});
+}
 
-sub pagefile ($) { #{{{
-	my $page=shift;
+my $aggregatelock;
 
-	return "$config{srcdir}/".htmlfn($page);
-} #}}}
+sub lockaggregate () {
+	# Take an exclusive lock to prevent multiple concurrent aggregators.
+	# Returns true if the lock was acquired.
+	if (! -d $config{wikistatedir}) {
+		mkdir($config{wikistatedir});
+	}
+	open($aggregatelock, '>', "$config{wikistatedir}/aggregatelock") ||
+		error ("cannot open to $config{wikistatedir}/aggregatelock: $!");
+	if (! flock($aggregatelock, 2 | 4)) { # LOCK_EX | LOCK_NB
+		close($aggregatelock) || error("failed closing aggregatelock: $!");
+		return 0;
+	}
+	return 1;
+}
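
    The numeric flags passed to flock are LOCK_EX (2) and LOCK_NB (4); an
    equivalent formulation using the symbolic constants from Fcntl would be:

	use Fcntl qw(:flock);
	if (! flock($aggregatelock, LOCK_EX | LOCK_NB)) {
		# another aggregator already holds the lock; fail fast
		close($aggregatelock) || error("failed closing aggregatelock: $!");
		return 0;
	}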
 
-sub htmlfn ($) { #{{{
-	return shift().".".$config{htmlext};
-} #}}}
+sub unlockaggregate () {
+	return close($aggregatelock) if $aggregatelock;
+	return;
+}
 
 1