X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/14b49376dc03672f896e5f1df3945a718a39d5a0..9ef4b5e677919f95dd2aa08ca51a0313c6c5542d:/IkiWiki.pm?ds=sidebyside

diff --git a/IkiWiki.pm b/IkiWiki.pm
index 08017635f..85d8eea68 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -13,26 +13,27 @@ use open qw{:utf8 :std};
 
 use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
 	    %pagestate %wikistate %renderedfiles %oldrenderedfiles
-	    %pagesources %destsources %depends %hooks %forcerebuild
-	    $gettext_obj %loaded_plugins};
+	    %pagesources %destsources %depends %depends_simple %hooks
+	    %forcerebuild %loaded_plugins};
 
 use Exporter q{import};
 our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
-                 bestlink htmllink readfile writefile pagetype srcfile pagename
-                 displaytime will_render gettext urlto targetpage
-		 add_underlay pagetitle titlepage linkpage newpagefile
-		 inject warning
+                 pagespec_match_list bestlink htmllink readfile writefile
+		 pagetype srcfile pagename displaytime will_render gettext urlto
+		 targetpage add_underlay pagetitle titlepage linkpage
+		 newpagefile inject add_link
                  %config %links %pagestate %wikistate %renderedfiles
                  %pagesources %destsources);
 our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
 our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir=''; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
+our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
 
 # Optimisation.
 use Memoize;
 memoize("abs2rel");
 memoize("pagespec_translate");
 memoize("file_pruned");
+memoize("template_file");
 
 sub getsetup () {
 	wikiname => {
@@ -149,6 +150,13 @@ sub getsetup () {
 		safe => 0, # path
 		rebuild => 1,
 	},
+	templatedirs => {
+		type => "internal",
+		default => [],
+		description => "additional directories containing template files",
+		safe => 0,
+		rebuild => 0,
+	},
 	underlaydir => {
 		type => "string",
 		default => "$installdir/share/ikiwiki/basewiki",
@@ -157,6 +165,13 @@ sub getsetup () {
 		safe => 0, # path
 		rebuild => 0,
 	},
+	underlaydirbase => {
+		type => "internal",
+		default => "$installdir/share/ikiwiki",
+		description => "parent directory containing additional underlays",
+		safe => 0,
+		rebuild => 0,
+	},
 	wrappers => {
 		type => "internal",
 		default => [],
@@ -174,7 +189,7 @@ sub getsetup () {
 	verbose => {
 		type => "boolean",
 		example => 1,
-		description => "display verbose messages when building?",
+		description => "display verbose messages?",
 		safe => 1,
 		rebuild => 0,
 	},
@@ -213,6 +228,13 @@ sub getsetup () {
 		safe => 1,
 		rebuild => 1,
 	},
+	discussionpage => {
+		type => "string",
+		default => gettext("Discussion"),
+		description => "name of Discussion pages",
+		safe => 1,
+		rebuild => 1,
+	},
 	sslcookie => {
 		type => "boolean",
 		default => 0,
@@ -321,8 +343,8 @@ sub getsetup () {
 		default => [qr/(^|\/)\.\.(\/|$)/, qr/^\./, qr/\/\./,
 			qr/\.x?html?$/, qr/\.ikiwiki-new$/,
 			qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
-			qr/(^|\/)_MTN\//,
-			qr/\.dpkg-tmp$/],
+			qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
+			qr/(^|\/)CVS\//, qr/\.dpkg-tmp$/],
 		description => "regexps of source files to ignore",
 		safe => 0,
 		rebuild => 1,
@@ -342,7 +364,7 @@ sub getsetup () {
 	},
 	web_commit_regexp => {
 		type => "internal",
-		default => qr/^web commit (by (.*?(?=: |$))|from (\d+\.\d+\.\d+\.\d+)):?(.*)/,
+		default => qr/^web commit (by (.*?(?=: |$))|from ([0-9a-fA-F:.]+[0-9a-fA-F])):?(.*)/,
 		description => "regexp to parse web commits from logs",
 		safe => 0,
 		rebuild => 0,
@@ -452,7 +474,7 @@ sub checkconfig () {
 	if (defined $config{locale}) {
 		if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
 			$ENV{LANG}=$config{locale};
-			$gettext_obj=undef;
+			define_gettext();
 		}
 	}
 		
@@ -533,7 +555,7 @@ sub loadplugins () {
 
 	run_hooks(getopt => sub { shift->() });
 	if (grep /^-/, @ARGV) {
-		print STDERR "Unknown option: $_\n"
+		print STDERR "Unknown option (or missing parameter): $_\n"
 			foreach grep /^-/, @ARGV;
 		usage();
 	}
@@ -579,10 +601,6 @@ sub error ($;$) {
 	die $message."\n";
 }
 
-sub warning ($) {
-	return log_message(warning => @_);
-}
-
 sub debug ($) {
 	return unless $config{verbose};
 	return log_message(debug => @_);
@@ -631,30 +649,45 @@ sub dirname ($) {
 	return $file;
 }
 
-sub pagetype ($) {
+sub isinternal ($) {
 	my $page=shift;
+	return exists $pagesources{$page} &&
+		$pagesources{$page} =~ /\._([^.]+)$/;
+}
+
+sub pagetype ($) {
+	my $file=shift;
 	
-	if ($page =~ /\.([^.]+)$/) {
+	if ($file =~ /\.([^.]+)$/) {
 		return $1 if exists $hooks{htmlize}{$1};
 	}
+	my $base=basename($file);
+	if (exists $hooks{htmlize}{$base} &&
+	    $hooks{htmlize}{$base}{noextension}) {
+		return $base;
+	}
 	return;
 }
 
-sub isinternal ($) {
-	my $page=shift;
-	return exists $pagesources{$page} &&
-		$pagesources{$page} =~ /\._([^.]+)$/;
-}
+my %pagename_cache;
 
 sub pagename ($) {
 	my $file=shift;
 
+	if (exists $pagename_cache{$file}) {
+		return $pagename_cache{$file};
+	}
+
 	my $type=pagetype($file);
 	my $page=$file;
-	$page=~s/\Q.$type\E*$// if defined $type && !$hooks{htmlize}{$type}{keepextension};
+ 	$page=~s/\Q.$type\E*$//
+		if defined $type && !$hooks{htmlize}{$type}{keepextension}
+			&& !$hooks{htmlize}{$type}{noextension};
 	if ($config{indexpages} && $page=~/(.*)\/index$/) {
 		$page=$1;
 	}
+
+	$pagename_cache{$file} = $page;
 	return $page;
 }
 
@@ -712,7 +745,7 @@ sub add_underlay ($) {
 	my $dir=shift;
 
 	if ($dir !~ /^\//) {
-		$dir="$config{underlaydir}/../$dir";
+		$dir="$config{underlaydirbase}/$dir";
 	}
 
 	if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
@@ -1053,6 +1086,41 @@ sub htmllink ($$$;@) {
 	return "<a href=\"$bestlink\"@attrs>$linktext</a>";
 }
 
+sub openiduser ($) {
+	my $user=shift;
+
+	if ($user =~ m!^https?://! &&
+	    eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
+		my $display;
+
+		if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
+			# this works in at least 2.x
+			$display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
+		}
+		else {
+			# this only works in 1.x
+			my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
+			$display=$oid->display;
+		}
+
+		# Convert "user.somehost.com" to "user [somehost.com]"
+		# (also "user.somehost.co.uk")
+		if ($display !~ /\[/) {
+			$display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
+		}
+		# Convert "http://somehost.com/user" to "user [somehost.com]".
+		# (also "https://somehost.com/user/")
+		if ($display !~ /\[/) {
+			$display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/;
+		}
+		$display=~s!^https?://!!; # make sure this is removed
+		eval q{use CGI 'escapeHTML'};
+		error($@) if $@;
+		return escapeHTML($display);
+	}
+	return;
+}
+
 sub userlink ($) {
 	my $user=shift;
 
@@ -1203,9 +1271,10 @@ sub preprocess ($$$;$$) {
 					);
 				};
 				if ($@) {
-					chomp $@;
+					my $error=$@;
+					chomp $error;
 				 	$ret="[[!$command <span class=\"error\">".
-						gettext("Error").": $@"."</span>]]";
+						gettext("Error").": $error"."</span>]]";
 				}
 			}
 			else {
@@ -1243,7 +1312,7 @@ sub preprocess ($$$;$$) {
 						|
 						"[^"]+"		# single-quoted value
 						|
-						[^\s\]]+	# unquoted value
+						[^"\s\]]+	# unquoted value
 					)
 					\s*			# whitespace or end
 								# of directive
@@ -1266,7 +1335,7 @@ sub preprocess ($$$;$$) {
 						|
 						"[^"]+"		# single-quoted value
 						|
-						[^\s\]]+	# unquoted value
+						[^"\s\]]+	# unquoted value
 					)
 					\s*			# whitespace or end
 								# of directive
@@ -1297,6 +1366,70 @@ sub indexlink () {
 	return "<a href=\"$config{url}\">$config{wikiname}</a>";
 }
 
+sub check_canedit ($$$;$) {
+	my $page=shift;
+	my $q=shift;
+	my $session=shift;
+	my $nonfatal=shift;
+	
+	my $canedit;
+	run_hooks(canedit => sub {
+		return if defined $canedit;
+		my $ret=shift->($page, $q, $session);
+		if (defined $ret) {
+			if ($ret eq "") {
+				$canedit=1;
+			}
+			elsif (ref $ret eq 'CODE') {
+				$ret->() unless $nonfatal;
+				$canedit=0;
+			}
+			elsif (defined $ret) {
+				error($ret) unless $nonfatal;
+				$canedit=0;
+			}
+		}
+	});
+	return defined $canedit ? $canedit : 1;
+}
+
+sub check_content (@) {
+	my %params=@_;
+	
+	return 1 if ! exists $hooks{checkcontent}; # optimisation
+
+	if (exists $pagesources{$params{page}}) {
+		my @diff;
+		my %old=map { $_ => 1 }
+		        split("\n", readfile(srcfile($pagesources{$params{page}})));
+		foreach my $line (split("\n", $params{content})) {
+			push @diff, $line if ! exists $old{$line};
+		}
+		$params{diff}=join("\n", @diff);
+	}
+
+	my $ok;
+	run_hooks(checkcontent => sub {
+		return if defined $ok;
+		my $ret=shift->(%params);
+		if (defined $ret) {
+			if ($ret eq "") {
+				$ok=1;
+			}
+			elsif (ref $ret eq 'CODE') {
+				$ret->() unless $params{nonfatal};
+				$ok=0;
+			}
+			elsif (defined $ret) {
+				error($ret) unless $params{nonfatal};
+				$ok=0;
+			}
+		}
+
+	});
+	return defined $ok ? $ok : 1;
+}
+
 my $wikilock;
 
 sub lockwiki () {
@@ -1350,7 +1483,8 @@ sub loadindex () {
 	%oldrenderedfiles=%pagectime=();
 	if (! $config{rebuild}) {
 		%pagesources=%pagemtime=%oldlinks=%links=%depends=
-		%destsources=%renderedfiles=%pagecase=%pagestate=();
+		%destsources=%renderedfiles=%pagecase=%pagestate=
+		%depends_simple=();
 	}
 	my $in;
 	if (! open ($in, "<", "$config{wikistatedir}/indexdb")) {
@@ -1390,8 +1524,18 @@ sub loadindex () {
 				$links{$page}=$d->{links};
 				$oldlinks{$page}=[@{$d->{links}}];
 			}
-			if (exists $d->{depends}) {
-				$depends{$page}=$d->{depends};
+			if (exists $d->{depends_simple}) {
+				$depends_simple{$page}={
+					map { $_ => 1 } @{$d->{depends_simple}}
+				};
+			}
+			if (exists $d->{dependslist}) {
+				$depends{$page}={
+					map { $_ => 1 } @{$d->{dependslist}}
+				};
+			}
+			elsif (exists $d->{depends}) {
+				$depends{$page}={$d->{depends} => 1};
 			}
 			if (exists $d->{state}) {
 				$pagestate{$page}=$d->{state};
@@ -1437,7 +1581,11 @@ sub saveindex () {
 		};
 
 		if (exists $depends{$page}) {
-			$index{page}{$src}{depends} = $depends{$page};
+			$index{page}{$src}{dependslist} = [ keys %{$depends{$page}} ];
+		}
+
+		if (exists $depends_simple{$page}) {
+			$index{page}{$src}{depends_simple} = [ keys %{$depends_simple{$page}} ];
 		}
 
 		if (exists $pagestate{$page}) {
@@ -1469,7 +1617,8 @@ sub saveindex () {
 sub template_file ($) {
 	my $template=shift;
 
-	foreach my $dir ($config{templatedir}, "$installdir/share/ikiwiki/templates") {
+	foreach my $dir ($config{templatedir}, @{$config{templatedirs}},
+	                 "$installdir/share/ikiwiki/templates") {
 		return "$dir/$template" if -e "$dir/$template";
 	}
 	return;
@@ -1604,25 +1753,20 @@ sub rcs_receive () {
 	$hooks{rcs}{rcs_receive}{call}->();
 }
 
-sub safequote ($) {
-	my $s=shift;
-	$s=~s/[{}]//g;
-	return "q{$s}";
-}
-
 sub add_depends ($$) {
 	my $page=shift;
 	my $pagespec=shift;
-	
-	return unless pagespec_valid($pagespec);
 
-	if (! exists $depends{$page}) {
-		$depends{$page}=$pagespec;
-	}
-	else {
-		$depends{$page}=pagespec_merge($depends{$page}, $pagespec);
+	if ($pagespec =~ /$config{wiki_file_regexp}/ &&
+		$pagespec !~ /[\s*?()!]/) {
+		# a simple dependency, which can be matched by string eq
+		$depends_simple{$page}{lc $pagespec} = 1;
+		return 1;
 	}
 
+	return unless pagespec_valid($pagespec);
+
+	$depends{$page}{$pagespec} = 1;
 	return 1;
 }
 
@@ -1636,29 +1780,37 @@ sub file_pruned ($$) {
 	return $file =~ m/$regexp/ && $file ne $base;
 }
 
-sub gettext {
-	# Only use gettext in the rare cases it's needed.
+sub define_gettext () {
+	# If translation is needed, redefine the gettext function to do it.
+	# Otherwise, it becomes a quick no-op.
+	no warnings 'redefine';
 	if ((exists $ENV{LANG} && length $ENV{LANG}) ||
 	    (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
 	    (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
-		if (! $gettext_obj) {
-			$gettext_obj=eval q{
+	    	*gettext=sub {
+			my $gettext_obj=eval q{
 				use Locale::gettext q{textdomain};
 				Locale::gettext->domain('ikiwiki')
 			};
-			if ($@) {
-				print STDERR "$@";
-				$gettext_obj=undef;
+
+			if ($gettext_obj) {
+				$gettext_obj->get(shift);
+			}
+			else {
 				return shift;
 			}
-		}
-		return $gettext_obj->get(shift);
+		};
 	}
 	else {
-		return shift;
+		*gettext=sub { return shift };
 	}
 }
 
+sub gettext {
+	define_gettext();
+	gettext(@_);
+}
+
 sub yesno ($) {
 	my $val=shift;
 
@@ -1690,12 +1842,12 @@ sub inject {
 	use warnings;
 }
 
-sub pagespec_merge ($$) {
-	my $a=shift;
-	my $b=shift;
+sub add_link ($$) {
+	my $page=shift;
+	my $link=shift;
 
-	return $a if $a eq $b;
-	return "($a) or ($b)";
+	push @{$links{$page}}, $link
+		unless grep { $_ eq $link } @{$links{$page}};
 }
 
 sub pagespec_translate ($) {
@@ -1703,6 +1855,7 @@ sub pagespec_translate ($) {
 
 	# Convert spec to perl code.
 	my $code="";
+	my @data;
 	while ($spec=~m{
 		\s*		# ignore whitespace
 		(		# 1: match a single word
@@ -1730,19 +1883,22 @@ sub pagespec_translate ($) {
 		}
 		elsif ($word =~ /^(\w+)\((.*)\)$/) {
 			if (exists $IkiWiki::PageSpec::{"match_$1"}) {
-				$code.="IkiWiki::PageSpec::match_$1(\$page, ".safequote($2).", \@_)";
+				push @data, $2;
+				$code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
 			}
 			else {
-				$code.=' 0';
+				push @data, qq{unknown function in pagespec "$word"};
+				$code.="IkiWiki::ErrorReason->new(\$data[$#data])";
 			}
 		}
 		else {
-			$code.=" IkiWiki::PageSpec::match_glob(\$page, ".safequote($word).", \@_)";
+			push @data, $word;
+			$code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
 		}
 	}
 
 	if (! length $code) {
-		$code=0;
+		$code="IkiWiki::FailReason->new('empty pagespec')";
 	}
 
 	no warnings;
@@ -1760,11 +1916,35 @@ sub pagespec_match ($$;@) {
 	}
 
 	my $sub=pagespec_translate($spec);
-	return IkiWiki::FailReason->new("syntax error in pagespec \"$spec\"")
+	return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
 		if $@ || ! defined $sub;
 	return $sub->($page, @params);
 }
 
+sub pagespec_match_list ($$;@) {
+	my $pages=shift;
+	my $spec=shift;
+	my @params=@_;
+
+	my $sub=pagespec_translate($spec);
+	error "syntax error in pagespec \"$spec\""
+		if $@ || ! defined $sub;
+	
+	my @ret;
+	my $r;
+	foreach my $page (@$pages) {
+		$r=$sub->($page, @params);
+		push @ret, $page if $r;
+	}
+
+	if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
+		error(sprintf(gettext("cannot match pages: %s"), $r));
+	}
+	else {
+		return @ret;
+	}
+}
+
 sub pagespec_valid ($) {
 	my $spec=shift;
 
@@ -1794,6 +1974,10 @@ sub new {
 	return bless \$value, $class;
 }
 
+package IkiWiki::ErrorReason;
+
+our @ISA = 'IkiWiki::FailReason';
+
 package IkiWiki::SuccessReason;
 
 use overload (
@@ -1897,7 +2081,7 @@ sub match_created_before ($$;@) {
 		}
 	}
 	else {
-		return IkiWiki::FailReason->new("$testpage has no ctime");
+		return IkiWiki::ErrorReason->new("$testpage does not exist");
 	}
 }
 
@@ -1917,7 +2101,7 @@ sub match_created_after ($$;@) {
 		}
 	}
 	else {
-		return IkiWiki::FailReason->new("$testpage has no ctime");
+		return IkiWiki::ErrorReason->new("$testpage does not exist");
 	}
 }
 
@@ -1954,7 +2138,7 @@ sub match_user ($$;@) {
 	my %params=@_;
 	
 	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
 	}
 
 	if (defined $params{user} && lc $params{user} eq lc $user) {
@@ -1974,7 +2158,7 @@ sub match_admin ($$;@) {
 	my %params=@_;
 	
 	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
 	}
 
 	if (defined $params{user} && IkiWiki::is_admin($params{user})) {
@@ -1994,7 +2178,7 @@ sub match_ip ($$;@) {
 	my %params=@_;
 	
 	if (! exists $params{ip}) {
-		return IkiWiki::FailReason->new("no IP specified");
+		return IkiWiki::ErrorReason->new("no IP specified");
 	}
 
 	if (defined $params{ip} && lc $params{ip} eq lc $ip) {