diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm
index f90f16335..48a25bef7 100644
--- a/IkiWiki/Render.pm
+++ b/IkiWiki/Render.pm
@@ -4,79 +4,8 @@ package IkiWiki;
 
 use warnings;
 use strict;
-use File::Spec;
 use IkiWiki;
-
-sub linkify ($$) { #{{{
-	my $content=shift;
-	my $page=shift;
-
-	$content =~ s{(\\?)$config{wiki_link_regexp}}{
-		$2 ? ( $1 ? "[[$2|$3]]" : htmllink($page, titlepage($3), 0, 0, pagetitle($2)))
-		   : ( $1 ? "[[$3]]" :    htmllink($page, titlepage($3)))
-	}eg;
-	
-	return $content;
-} #}}}
-
-my $_scrubber;
-sub scrubber { #{{{
-	return $_scrubber if defined $_scrubber;
-	
-	eval q{use HTML::Scrubber};
-	# Lists based on http://feedparser.org/docs/html-sanitization.html
-	$_scrubber = HTML::Scrubber->new(
-		allow => [qw{
-			a abbr acronym address area b big blockquote br
-			button caption center cite code col colgroup dd del
-			dfn dir div dl dt em fieldset font form h1 h2 h3 h4
-			h5 h6 hr i img input ins kbd label legend li map
-			menu ol optgroup option p pre q s samp select small
-			span strike strong sub sup table tbody td textarea
-			tfoot th thead tr tt u ul var
-		}],
-		default => [undef, { map { $_ => 1 } qw{
-			abbr accept accept-charset accesskey action
-			align alt axis border cellpadding cellspacing
-			char charoff charset checked cite class
-			clear cols colspan color compact coords
-			datetime dir disabled enctype for frame
-			headers height href hreflang hspace id ismap
-			label lang longdesc maxlength media method
-			multiple name nohref noshade nowrap prompt
-			readonly rel rev rows rowspan rules scope
-			selected shape size span src start summary
-			tabindex target title type usemap valign
-			value vspace width
-		}}],
-	);
-	return $_scrubber;
-} # }}}
-
-sub htmlize ($$) { #{{{
-	my $type=shift;
-	my $content=shift;
-	
-	if (! $INC{"/usr/bin/markdown"}) {
-		no warnings 'once';
-		$blosxom::version="is a proper perl module too much to ask?";
-		use warnings 'all';
-		do "/usr/bin/markdown";
-	}
-	
-	if ($type eq '.mdwn') {
-		$content=Markdown::Markdown($content);
-	}
-	else {
-		error("htmlization of $type not supported");
-	}
-
-	if ($config{sanitize}) {
-		$content=scrubber()->scrub($content);
-	}
-	
-	return $content;
-} #}}}
+use Encode;
 
 sub backlinks ($) { #{{{
 	my $page=shift;
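
The linkify(), scrubber() and htmlize() helpers removed above are not gone, just relocated: as the calls in render() further down suggest, linkify(), preprocess(), filter() and htmlize() are now provided by IkiWiki.pm and its plugin hooks, and sanitization likewise becomes a hook rather than hard-wired HTML::Scrubber code in Render.pm. The new "use Encode" supports the decode_utf8() calls added to refresh() below. As a rough, hedged illustration of where the old scrubber() logic can end up, here is a minimal sketch of a sanitize-hook plugin; the plugin name is hypothetical, and it assumes the hook() registration interface exported by IkiWiki.pm and that sanitize hooks receive the content as a named parameter:

    package IkiWiki::Plugin::scrubber_sketch;
    use warnings;
    use strict;
    use IkiWiki;

    sub import { #{{{
        hook(type => "sanitize", id => "scrubber_sketch", call => \&sanitize);
    } #}}}

    my $_scrubber;
    sub sanitize (@) { #{{{
        my %params=@_;
        eval q{use HTML::Scrubber};
        # A deliberately short allow list; the removed scrubber() above
        # shows the full tag and attribute lists this is modeled on.
        $_scrubber=HTML::Scrubber->new(allow => [qw{
            a abbr b blockquote br code em i li ol p pre q small
            strong sub sup table tbody td th thead tr ul
        }]) unless defined $_scrubber;
        return $_scrubber->scrub($params{content});
    } #}}}
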
@@ -85,7 +14,7 @@ sub backlinks ($) { #{{{
 	foreach my $p (keys %links) {
 		next if bestlink($page, $p) eq $page;
 		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
-			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
+			my $href=abs2rel(htmlpage($p), dirname($page));
 			
 			# Trim common dir prefixes from both pages.
 			my $p_trimmed=$p;
@@ -96,7 +25,7 @@ sub backlinks ($) { #{{{
 			        $p_trimmed=~s/^\Q$dir\E// &&
 			        $page_trimmed=~s/^\Q$dir\E//;
 				       
-			push @links, { url => $href, page => $p_trimmed };
+			push @links, { url => $href, page => pagetitle($p_trimmed) };
 		}
 	}
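
Two small changes in backlinks(): the relative URL is now computed with IkiWiki's own abs2rel() instead of File::Spec->abs2rel (one reason the "use File::Spec" line is dropped above), and the link text goes through pagetitle(). A hedged illustration, assuming pagetitle() maps the underscore escaping used in page names back to readable titles:

    my $text=pagetitle("test_page");    # "test page", shown as the backlink text
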
 
@@ -110,10 +39,11 @@ sub parentlinks ($) { #{{{
 	my $pagelink="";
 	my $path="";
 	my $skip=1;
+	return if $page eq 'index'; # toplevel
 	foreach my $dir (reverse split("/", $page)) {
 		if (! $skip) {
 			$path.="../";
-			unshift @ret, { url => "$path$dir.html", page => $dir };
+			unshift @ret, { url => $path.htmlpage($dir), page => pagetitle($dir) };
 		}
 		else {
 			$skip=0;
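
A worked example of the reworked parentlinks() (assuming htmlpage() appends ".html" to a page name): for the page "docs/setup/install" the loop walks the reversed path ("install", "setup", "docs"), skips the page itself, and yields top-down parent links

    { url => "../../docs.html", page => "docs"  },
    { url => "../setup.html",   page => "setup" }

while the toplevel "index" page now gets no parent links at all, thanks to the early return.
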
@@ -123,105 +53,64 @@ sub parentlinks ($) { #{{{
 	return @ret;
 } #}}}
 
-sub preprocess ($$) { #{{{
-	my $page=shift;
-	my $content=shift;
-
-	my $handle=sub {
-		my $escape=shift;
-		my $command=shift;
-		my $params=shift;
-		if (length $escape) {
-			return "[[$command $params]]";
-		}
-		elsif (exists $hooks{preprocess}{$command}) {
-			my %params;
-			while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) {
-				$params{$1}=$2;
-			}
-			return $hooks{preprocess}{$command}{call}->(page => $page, %params);
-		}
-		else {
-			return "[[$command not processed]]";
-		}
-	};
-	
-	$content =~ s{(\\?)$config{wiki_processor_regexp}}{$handle->($1, $2, $3)}eg;
-	return $content;
-} #}}}
-
-sub add_depends ($$) { #{{{
-	my $page=shift;
-	my $globlist=shift;
-	
-	if (! exists $depends{$page}) {
-		$depends{$page}=$globlist;
-	}
-	else {
-		$depends{$page}=globlist_merge($depends{$page}, $globlist);
-	}
-} # }}}
-
-sub globlist_merge ($$) { #{{{
-	my $a=shift;
-	my $b=shift;
-
-	my $ret="";
-	# Only add negated globs if they are not matched by the other globlist.
-	foreach my $i ((map { [ $a, $_ ] } split(" ", $b)), 
-	               (map { [ $b, $_ ] } split(" ", $a))) {
-		if ($i->[1]=~/^!(.*)/) {
-			if (! globlist_match($1, $i->[0])) {
-				$ret.=" ".$i->[1];
-			}
-		}
-		else {
-			$ret.=" ".$i->[1];
-		}
-	}
-	
-	return $ret;
-} #}}}
-
 sub genpage ($$$) { #{{{
-	my $content=shift;
 	my $page=shift;
+	my $content=shift;
 	my $mtime=shift;
 
-	my $title=pagetitle(basename($page));
-	
-	my $template=HTML::Template->new(blind_cache => 1,
-		filename => "$config{templatedir}/page.tmpl");
-	
+	my $template=template("page.tmpl", blind_cache => 1);
+	my $actions=0;
+
 	if (length $config{cgiurl}) {
 		$template->param(editurl => cgiurl(do => "edit", page => $page));
 		$template->param(prefsurl => cgiurl(do => "prefs"));
 		if ($config{rcs}) {
 			$template->param(recentchangesurl => cgiurl(do => "recentchanges"));
 		}
+		$actions++;
 	}
 
 	if (length $config{historyurl}) {
 		my $u=$config{historyurl};
 		$u=~s/\[\[file\]\]/$pagesources{$page}/g;
 		$template->param(historyurl => $u);
+		$actions++;
 	}
-	if ($config{hyperestraier}) {
-		$template->param(hyperestraierurl => cgiurl());
+	if ($config{discussion}) {
+		$template->param(discussionlink => htmllink($page, $page, "Discussion", 1, 1));
+		$actions++;
+	}
+
+	if ($actions) {
+		$template->param(have_actions => 1);
 	}
 
 	$template->param(
-		title => $title,
+		title => $page eq 'index' 
+			? $config{wikiname} 
+			: pagetitle(basename($page)),
 		wikiname => $config{wikiname},
 		parentlinks => [parentlinks($page)],
 		content => $content,
 		backlinks => [backlinks($page)],
-		discussionlink => htmllink($page, "Discussion", 1, 1),
-		mtime => scalar(gmtime($mtime)),
-		styleurl => styleurl($page),
+		mtime => displaytime($mtime),
+		baseurl => baseurl($page),
 	);
+
+	run_hooks(pagetemplate => sub {
+		shift->(page => $page, destpage => $page, template => $template);
+	});
 	
-	return $template->output;
+	$content=$template->output;
+
+	run_hooks(format => sub {
+		$content=shift->(
+			page => $page,
+			content => $content,
+		);
+	});
+
+	return $content;
 } #}}}
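
genpage() now takes (page, content, mtime), builds the template via the template() helper, sets a have_actions flag so page.tmpl can suppress an empty action bar, and runs two hook types over the result: pagetemplate, to let plugins fill extra template variables, and format, to post-process the finished HTML. A minimal sketch of a pagetemplate plugin that the run_hooks() call above would invoke; the plugin name and the "footer" variable are hypothetical, and it assumes the hook() interface plus HTML::Template's query() and param() methods:

    package IkiWiki::Plugin::footer_sketch;
    use warnings;
    use strict;
    use IkiWiki;

    sub import { #{{{
        hook(type => "pagetemplate", id => "footer_sketch", call => \&pagetemplate);
    } #}}}

    sub pagetemplate (@) { #{{{
        my %params=@_;
        my $template=$params{template};
        # Only fill the variable if page.tmpl actually defines it.
        if ($template->query(name => "footer")) {
            $template->param(footer => "Rendered by ikiwiki");
        }
    } #}}}
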
 
 sub check_overwrite ($$) { #{{{
@@ -231,10 +120,7 @@ sub check_overwrite ($$) { #{{{
 	my $src=shift;
 	
 	if (! exists $renderedfiles{$src} && -e $dest && ! $config{rebuild}) {
-		error("$dest already exists and was rendered from ".
-			join(" ",(grep { $renderedfiles{$_} eq $dest } keys
-				%renderedfiles)).
-			", before, so not rendering from $src");
+		error("$dest already exists and was not rendered from $src before");
 	}
 } #}}}
 
@@ -245,16 +131,21 @@ sub mtime ($) { #{{{
 } #}}}
 
 sub findlinks ($$) { #{{{
-	my $content=shift;
 	my $page=shift;
+	my $content=shift;
 
 	my @links;
 	while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
 		push @links, titlepage($2);
 	}
-	# Discussion links are a special case since they're not in the text
-	# of the page, but on its template.
-	return @links, "$page/discussion";
+	if ($config{discussion}) {
+		# Discussion links are a special case since they're not in the
+		# text of the page, but on its template.
+		return @links, "$page/discussion";
+	}
+	else {
+		return @links;
+	}
 } #}}}
 
 sub render ($) { #{{{
@@ -262,20 +153,22 @@ sub render ($) { #{{{
 	
 	my $type=pagetype($file);
 	my $srcfile=srcfile($file);
-	if ($type ne 'unknown') {
+	if (defined $type) {
 		my $content=readfile($srcfile);
 		my $page=pagename($file);
-		
-		$links{$page}=[findlinks($content, $page)];
 		delete $depends{$page};
 		
-		$content=linkify($content, $page);
-		$content=preprocess($page, $content);
-		$content=htmlize($type, $content);
+		$content=filter($page, $content);
+		
+		$links{$page}=[findlinks($page, $content)];
+		
+		$content=preprocess($page, $page, $content);
+		$content=linkify($page, $page, $content);
+		$content=htmlize($page, $type, $content);
 		
 		check_overwrite("$config{destdir}/".htmlpage($page), $page);
 		writefile(htmlpage($page), $config{destdir},
-			genpage($content, $page, mtime($srcfile)));
+			genpage($page, $content, mtime($srcfile)));
 		$oldpagemtime{$page}=time;
 		$renderedfiles{$page}=htmlpage($page);
 	}
@@ -300,57 +193,6 @@ sub prune ($) { #{{{
 	}
 } #}}}
 
-sub estcfg () { #{{{
-	my $estdir="$config{wikistatedir}/hyperestraier";
-	my $cgi=basename($config{cgiurl});
-	$cgi=~s/\..*$//;
-	open(TEMPLATE, ">$estdir/$cgi.tmpl") ||
-		error("write $estdir/$cgi.tmpl: $!");
-	print TEMPLATE misctemplate("search", 
-		"<!--ESTFORM-->\n\n<!--ESTRESULT-->\n\n<!--ESTINFO-->\n\n");
-	close TEMPLATE;
-	open(TEMPLATE, ">$estdir/$cgi.conf") ||
-		error("write $estdir/$cgi.conf: $!");
-	my $template=HTML::Template->new(
-		filename => "$config{templatedir}/estseek.conf"
-	);
-	eval q{use Cwd 'abs_path'};
-	$template->param(
-		index => $estdir,
-		tmplfile => "$estdir/$cgi.tmpl",
-		destdir => abs_path($config{destdir}),
-		url => $config{url},
-	);
-	print TEMPLATE $template->output;
-	close TEMPLATE;
-	$cgi="$estdir/".basename($config{cgiurl});
-	unlink($cgi);
-	symlink("/usr/lib/estraier/estseek.cgi", $cgi) ||
-		error("symlink $cgi: $!");
-} # }}}
-
-sub estcmd ($;@) { #{{{
-	my @params=split(' ', shift);
-	push @params, "-cl", "$config{wikistatedir}/hyperestraier";
-	if (@_) {
-		push @params, "-";
-	}
-	
-	my $pid=open(CHILD, "|-");
-	if ($pid) {
-		# parent
-		foreach (@_) {
-			print CHILD "$_\n";
-		}
-		close(CHILD) || error("estcmd @params exited nonzero: $?");
-	}
-	else {
-		# child
-		open(STDOUT, "/dev/null"); # shut it up (closing won't work)
-		exec("estcmd", @params) || error("can't run estcmd");
-	}
-} #}}}
-
 sub refresh () { #{{{
 	# find existing pages
 	my %exists;
@@ -359,6 +201,7 @@ sub refresh () { #{{{
 	find({
 		no_chdir => 1,
 		wanted => sub {
+			$_=decode_utf8($_);
 			if (/$config{wiki_file_prune_regexp}/) {
 				$File::Find::prune=1;
 			}
@@ -378,6 +221,7 @@ sub refresh () { #{{{
 	find({
 		no_chdir => 1,
 		wanted => sub {
+			$_=decode_utf8($_);
 			if (/$config{wiki_file_prune_regexp}/) {
 				$File::Find::prune=1;
 			}
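
The decode_utf8() calls (from the Encode module imported at the top) matter because File::Find hands wanted() raw bytes in $_; decoding upgrades them to Perl character strings so the prune regexp and the derived page names handle non-ASCII filenames correctly. For example:

    use Encode;
    my $bytes="caf\xc3\xa9.mdwn";    # UTF-8 bytes as read from the filesystem
    my $name=decode_utf8($bytes);    # the 9-character string "café.mdwn"
    # length($bytes) == 10, length($name) == 9
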
@@ -410,9 +254,14 @@ sub refresh () { #{{{
 			debug("new page $page") unless exists $pagectime{$page};
 			push @add, $file;
 			$links{$page}=[];
+			$pagecase{lc $page}=$page;
 			$pagesources{$page}=$file;
-			$pagectime{$page}=mtime(srcfile($file))
-				unless exists $pagectime{$page};
+			if ($config{getctime} && -e "$config{srcdir}/$file") {
+				$pagectime{$page}=rcs_getctime("$config{srcdir}/$file");
+			}
+			elsif (! exists $pagectime{$page}) {
+				$pagectime{$page}=mtime(srcfile($file));
+			}
 		}
 	}
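
Two bookkeeping additions: %pagecase records each page under its lowercased name (presumably for case-insensitive page lookups elsewhere), and with the getctime option the creation time comes from the revision control system rather than the file mtime. rcs_getctime() is expected to be supplied by the configured RCS backend and to return a unix timestamp for the file's first commit; a hypothetical fallback that just reuses the filesystem ctime might look like:

    sub rcs_getctime ($) { #{{{
        my $file=shift;
        # (stat)[10] is the inode change time; a real backend would ask
        # the RCS for the date of the file's first commit instead.
        return (stat($file))[10];
    } #}}}
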
 	my @del;
@@ -432,8 +281,9 @@ sub refresh () { #{{{
 		my $page=pagename($file);
 		
 		if (! exists $oldpagemtime{$page} ||
-		    mtime(srcfile($file)) > $oldpagemtime{$page}) {
-		    	debug("rendering changed file $file");
+		    mtime(srcfile($file)) > $oldpagemtime{$page} ||
+	    	    $forcerebuild{$page}) {
+		    	debug("rendering $file");
 			render($file);
 			$rendered{$file}=1;
 		}
@@ -462,7 +312,7 @@ FILE:		foreach my $file (@files) {
 	}
 
 	# Handle backlinks; if a page has added/removed links, update the
-	# pages it links to. Also handles rebuilding dependat pages.
+	# pages it links to. Also handles rebuilding dependent pages.
 	# TODO: inefficient; pages may get rendered above and again here;
 	# problem is the backlinks could be wrong in the first pass render
 	# above
@@ -473,7 +323,7 @@ FILE:		foreach my $file (@files) {
 				foreach my $file (keys %rendered, @del) {
 					next if $f eq $file;
 					my $page=pagename($file);
-					if (globlist_match($page, $depends{$p})) {
+					if (pagespec_match($page, $depends{$p})) {
 						debug("rendering $f, which depends on $page");
 						render($f);
 						$rendered{$f}=1;
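
The dependency check moves from globlist_match() to pagespec_match(), matching the removal of the globlist helpers above: the %depends entries are now full pagespecs rather than the space-separated globlists that globlist_merge() used to build. A hedged illustration (the pagespec parser itself lives in IkiWiki.pm):

    pagespec_match("blog/entry1",            "blog/* and !*/discussion");    # true
    pagespec_match("blog/entry1/discussion", "blog/* and !*/discussion");    # false
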
@@ -516,20 +366,37 @@ FILE:		foreach my $file (@files) {
 		}
 	}
 
-	if ($config{hyperestraier} && (%rendered || @del)) {
-		debug("updating hyperestraier search index");
-		if (%rendered) {
-			estcmd("gather -cm -bc -cl -sd", 
-				map { $config{destdir}."/".$renderedfiles{pagename($_)} }
-				keys %rendered);
-		}
-		if (@del) {
-			estcmd("purge -cl");
-		}
-		
-		debug("generating hyperestraier cgi config");
-		estcfg();
+	if (@del) {
+		run_hooks(delete => sub { shift->(@del) });
+	}
+	if (%rendered) {
+		run_hooks(change => sub { shift->(keys %rendered) });
 	}
 } #}}}
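
Where refresh() used to drive hyperestraier directly (the estcfg()/estcmd() code removed above), it now just reports what happened through generic delete and change hooks, so search indexing or any other post-processing can live in a plugin. A minimal sketch of a change-hook plugin; the plugin name is hypothetical, and the call signature follows the run_hooks(change => ...) line above, which passes the list of changed source files:

    package IkiWiki::Plugin::changelog_sketch;
    use warnings;
    use strict;
    use IkiWiki;

    sub import { #{{{
        hook(type => "change", id => "changelog_sketch", call => \&change);
    } #}}}

    sub change (@) { #{{{
        my @files=@_;
        # Log which source files were rendered during this refresh.
        debug("changed: @files");
    } #}}}
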
 
+sub commandline_render () { #{{{
+	loadplugins();
+	checkconfig();
+	lockwiki();
+	loadindex();
+	unlockwiki();
+
+	my $srcfile=possibly_foolish_untaint($config{render});
+	my $file=$srcfile;
+	$file=~s/\Q$config{srcdir}\E\/?//;
+
+	my $type=pagetype($file);
+	die "ikiwiki: cannot render $srcfile\n" unless defined $type;
+	my $content=readfile($srcfile);
+	my $page=pagename($file);
+	$pagesources{$page}=$file;
+	$content=filter($page, $content);
+	$content=preprocess($page, $page, $content);
+	$content=linkify($page, $page, $content);
+	$content=htmlize($page, $type, $content);
+
+	print genpage($page, $content, mtime($srcfile));
+	exit 0;
+} #}}}
+
 1
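
The new commandline_render() backs rendering a single page from the command line (via $config{render}, presumably set by a --render switch): it loads plugins and the saved index, strips the srcdir prefix from the given file, runs the same filter/preprocess/linkify/htmlize/genpage pipeline as render(), prints the finished HTML to stdout, and exits without writing anything to the destdir.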