X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/cefbe6210f4e89984bb40062e4f9787b7372dd52..0d666f4a7c504690866db868e454ebc20f00f5f5:/IkiWiki/Render.pm

diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm
index deec539ae..5b951df83 100644
--- a/IkiWiki/Render.pm
+++ b/IkiWiki/Render.pm
@@ -7,26 +7,43 @@ use strict;
 use IkiWiki;
 use Encode;
 
+my %backlinks;
+my $backlinks_calculated=0;
+
+sub calculate_backlinks () { #{{{
+	return if $backlinks_calculated;
+	%backlinks=();
+	foreach my $page (keys %links) {
+		foreach my $link (@{$links{$page}}) {
+			my $bestlink=bestlink($page, $link);
+			if (length $bestlink && $bestlink ne $page) {
+				$backlinks{$bestlink}{$page}=1;
+			}
+		}
+	}
+	$backlinks_calculated=1;
+} #}}}
+
 sub backlinks ($) { #{{{
 	my $page=shift;
 
+	calculate_backlinks();
+
 	my @links;
-	foreach my $p (keys %links) {
-		next if bestlink($page, $p) eq $page;
-		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
-			my $href=abs2rel(htmlpage($p), dirname($page));
+	return unless $backlinks{$page};
+	foreach my $p (keys %{$backlinks{$page}}) {
+		my $href=abs2rel(htmlpage($p), dirname($page));
 			
-			# Trim common dir prefixes from both pages.
-			my $p_trimmed=$p;
-			my $page_trimmed=$page;
-			my $dir;
-			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
-			        defined $dir &&
-			        $p_trimmed=~s/^\Q$dir\E// &&
-			        $page_trimmed=~s/^\Q$dir\E//;
-				       
-			push @links, { url => $href, page => pagetitle($p_trimmed) };
-		}
+		# Trim common dir prefixes from both pages.
+		my $p_trimmed=$p;
+		my $page_trimmed=$page;
+		my $dir;
+		1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
+		        defined $dir &&
+		        $p_trimmed=~s/^\Q$dir\E// &&
+		        $page_trimmed=~s/^\Q$dir\E//;
+			       
+		push @links, { url => $href, page => pagetitle($p_trimmed) };
 	}
 
 	return sort { $a->{page} cmp $b->{page} } @links;
@@ -119,21 +136,36 @@ sub mtime ($) { #{{{
 	return (stat($file))[9];
 } #}}}
 
-sub findlinks ($$) { #{{{
-	my $page=shift;
-	my $content=shift;
+sub scan ($) { #{{{
+	my $file=shift;
 
-	my @links;
-	while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
-		push @links, titlepage($2);
-	}
-	if ($config{discussion}) {
-		# Discussion links are a special case since they're not in the
-		# text of the page, but on its template.
-		return @links, "$page/discussion";
+	my $type=pagetype($file);
+	if (defined $type) {
+		my $srcfile=srcfile($file);
+		my $content=readfile($srcfile);
+		my $page=pagename($file);
+		will_render($page, htmlpage($page), 1);
+
+		# Always needs to be done, since filters might add links
+		# to the content.
+		$content=filter($page, $content);
+
+		my @links;
+		while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
+			push @links, titlepage($2);
+		}
+		if ($config{discussion}) {
+			# Discussion links are a special case since they're not in the
+			# text of the page, but on its template.
+			push @links, "$page/discussion";
+		}
+		$links{$page}=\@links;
+		
+		# Preprocess in scan-only mode.
+		preprocess($page, $page, $content, 1);
 	}
 	else {
-		return @links;
+		will_render($file, $file, 1);
 	}
 } #}}}
 
@@ -149,9 +181,6 @@ sub render ($) { #{{{
 		will_render($page, htmlpage($page), 1);
 		
 		$content=filter($page, $content);
-		
-		$links{$page}=[findlinks($page, $content)];
-		
 		$content=preprocess($page, $page, $content);
 		$content=linkify($page, $page, $content);
 		$content=htmlize($page, $type, $content);
@@ -162,7 +191,6 @@ sub render ($) { #{{{
 	}
 	else {
 		my $content=readfile($srcfile, 1);
-		$links{$file}=[];
 		delete $depends{$file};
 		will_render($file, $file, 1);
 		writefile($file, $config{destdir}, $content, 1);
@@ -238,9 +266,7 @@ sub refresh () { #{{{
 	foreach my $file (@files) {
 		my $page=pagename($file);
 		if (! $oldpagemtime{$page}) {
-			debug("new page $page") unless exists $pagectime{$page};
 			push @add, $file;
-			$links{$page}=[];
 			$pagecase{lc $page}=$page;
 			$pagesources{$page}=$file;
 			if ($config{getctime} && -e "$config{srcdir}/$file") {
@@ -256,6 +282,7 @@ sub refresh () { #{{{
 		if (! $exists{$page}) {
 			debug("removing old page $page");
 			push @del, $pagesources{$page};
+			$links{$page}=[];
 			$renderedfiles{$page}=[];
 			$oldpagemtime{$page}=0;
 			prune($config{destdir}."/".$_)
@@ -263,49 +290,47 @@ sub refresh () { #{{{
 			delete $pagesources{$page};
 		}
 	}
-	
-	# render any updated files
+
+	# scan changed and new files
+	my @changed;
 	foreach my $file (@files) {
 		my $page=pagename($file);
 		
 		if (! exists $oldpagemtime{$page} ||
 		    mtime(srcfile($file)) > $oldpagemtime{$page} ||
 	    	    $forcerebuild{$page}) {
-		    	debug("rendering $file");
-			render($file);
-			$rendered{$file}=1;
+		    	debug("scanning $file");
+			push @changed, $file;
+			scan($file);
 		}
 	}
+	calculate_backlinks();
+
+	# render changed and new pages
+	foreach my $file (@changed) {
+		debug("rendering $file");
+		render($file);
+		$rendered{$file}=1;
+	}
 	
-	# if any files were added or removed, check to see if each page
-	# needs an update due to linking to them or inlining them.
-	# TODO: inefficient; pages may get rendered above and again here;
-	# problem is the bestlink may have changed and we won't know until
-	# now
+	# rebuild pages that link to added or removed pages
 	if (@add || @del) {
-FILE:		foreach my $file (@files) {
-			my $page=pagename($file);
-			foreach my $f (@add, @del) {
-				my $p=pagename($f);
-				foreach my $link (@{$links{$page}}) {
-					if (bestlink($page, $link) eq $p) {
-		   				debug("rendering $file, which links to $p");
-						render($file);
-						$rendered{$file}=1;
-						next FILE;
-					}
-				}
+		foreach my $f (@add, @del) {
+			my $p=pagename($f);
+			foreach my $page (keys %{$backlinks{$p}}) {
+				my $file=$pagesources{$page};
+				next if $rendered{$file};
+		   		debug("rendering $file, which links to $p");
+				render($file);
+				$rendered{$file}=1;
 			}
 		}
 	}
 
-	# Handle backlinks; if a page has added/removed links, update the
-	# pages it links to. Also handles rebuilding dependant pages.
-	# TODO: inefficient; pages may get rendered above and again here;
-	# problem is the backlinks could be wrong in the first pass render
-	# above
 	if (%rendered || @del) {
+		# rebuild dependent pages
 		foreach my $f (@files) {
+			next if $rendered{$f};
 			my $p=pagename($f);
 			if (exists $depends{$p}) {
 				foreach my $file (keys %rendered, @del) {
@@ -321,6 +346,8 @@ FILE:		foreach my $file (@files) {
 			}
 		}
 		
+		# handle backlinks; if a page has added/removed links,
+		# update the pages it links to
 		my %linkchanged;
 		foreach my $file (keys %rendered, @del) {
 			my $page=pagename($file);
@@ -347,6 +374,7 @@ FILE:		foreach my $file (@files) {
 		foreach my $link (keys %linkchanged) {
 		    	my $linkfile=$pagesources{$link};
 			if (defined $linkfile) {
+				next if $rendered{$linkfile};
 				debug("rendering $linkfile, to update its backlinks");
 				render($linkfile);
 				$rendered{$linkfile}=1;
@@ -354,7 +382,7 @@ FILE:		foreach my $file (@files) {
 		}
 	}
 
-	# Remove no longer rendered files.
+	# remove no longer rendered files
 	foreach my $src (keys %rendered) {
 		my $page=pagename($src);
 		foreach my $file (@{$oldrenderedfiles{$page}}) {