X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/8fed4206cbeddd3c65c14efae69776f17f5fb78b..db041b8854d2d3f7137367e11e960ac4f6cdb6c5:/IkiWiki.pm
diff --git a/IkiWiki.pm b/IkiWiki.pm
index 2640f85a2..43ffb1fd8 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -14,19 +14,19 @@ use open qw{:utf8 :std};
use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
%pagestate %wikistate %renderedfiles %oldrenderedfiles
%pagesources %destsources %depends %hooks %forcerebuild
- $gettext_obj %loaded_plugins};
+ %loaded_plugins};
use Exporter q{import};
our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
- bestlink htmllink readfile writefile pagetype srcfile pagename
- displaytime will_render gettext urlto targetpage
- add_underlay pagetitle titlepage linkpage newpagefile
- inject
+ pagespec_match_list bestlink htmllink readfile writefile
+ pagetype srcfile pagename displaytime will_render gettext urlto
+ targetpage add_underlay pagetitle titlepage linkpage
+ newpagefile inject add_link
%config %links %pagestate %wikistate %renderedfiles
%pagesources %destsources);
our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir=''; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
+our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
# Optimisation.
use Memoize;
@@ -157,6 +157,13 @@ sub getsetup () {
safe => 0, # path
rebuild => 0,
},
+ underlaydirbase => {
+ type => "internal",
+ default => "$installdir/share/ikiwiki",
+ description => "parent directory containing additional underlays",
+ safe => 0,
+ rebuild => 0,
+ },
wrappers => {
type => "internal",
default => [],
@@ -213,6 +220,13 @@ sub getsetup () {
safe => 1,
rebuild => 1,
},
+ discussionpage => {
+ type => "string",
+ default => gettext("Discussion"),
+ description => "name of Discussion pages",
+ safe => 1,
+ rebuild => 1,
+ },
sslcookie => {
type => "boolean",
default => 0,
@@ -321,7 +335,7 @@ sub getsetup () {
default => [qr/(^|\/)\.\.(\/|$)/, qr/^\./, qr/\/\./,
qr/\.x?html?$/, qr/\.ikiwiki-new$/,
qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
- qr/(^|\/)_MTN\//,
+ qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
qr/\.dpkg-tmp$/],
description => "regexps of source files to ignore",
safe => 0,
@@ -452,7 +466,7 @@ sub checkconfig () {
if (defined $config{locale}) {
if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
$ENV{LANG}=$config{locale};
- $gettext_obj=undef;
+ define_gettext();
}
}
@@ -533,7 +547,7 @@ sub loadplugins () {
run_hooks(getopt => sub { shift->() });
if (grep /^-/, @ARGV) {
- print STDERR "Unknown option: $_\n"
+ print STDERR "Unknown option (or missing parameter): $_\n"
foreach grep /^-/, @ARGV;
usage();
}
@@ -639,8 +653,10 @@ sub pagetype ($) {
if ($file =~ /\.([^.]+)$/) {
return $1 if exists $hooks{htmlize}{$1};
}
- elsif ($hooks{htmlize}{basename($file)}{noextension}) {
- return basename($file);
+ my $base=basename($file);
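+	# Handle extension-less files, for which a htmlize hook may be
+	# registered under the full filename with the noextension option set.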
+ if (exists $hooks{htmlize}{$base} &&
+ $hooks{htmlize}{$base}{noextension}) {
+ return $base;
}
return;
}
@@ -713,7 +729,7 @@ sub add_underlay ($) {
my $dir=shift;
if ($dir !~ /^\//) {
- $dir="$config{underlaydir}/../$dir";
+ $dir="$config{underlaydirbase}/$dir";
}
if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
@@ -1054,6 +1070,41 @@ sub htmllink ($$$;@) {
return "$linktext";
}
+sub openiduser ($) {
+ my $user=shift;
+
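+	# Produce a prettified display name for a user who is an OpenID URL.
+	# Returns nothing if the user is not an OpenID, or if
+	# Net::OpenID::VerifiedIdentity is not available.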
+ if ($user =~ m!^https?://! &&
+ eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
+ my $display;
+
+ if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
+ # this works in at least 2.x
+ $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
+ }
+ else {
+ # this only works in 1.x
+ my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
+ $display=$oid->display;
+ }
+
+ # Convert "user.somehost.com" to "user [somehost.com]"
+ # (also "user.somehost.co.uk")
+ if ($display !~ /\[/) {
+ $display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
+ }
+ # Convert "http://somehost.com/user" to "user [somehost.com]".
+ # (also "https://somehost.com/user/")
+ if ($display !~ /\[/) {
+ $display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/;
+ }
+ $display=~s!^https?://!!; # make sure this is removed
+ eval q{use CGI 'escapeHTML'};
+ error($@) if $@;
+ return escapeHTML($display);
+ }
+ return;
+}
+
sub userlink ($) {
my $user=shift;
@@ -1204,9 +1255,10 @@ sub preprocess ($$$;$$) {
);
};
if ($@) {
- chomp $@;
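+				# Copy $@ first; the gettext() call below may run
+				# an eval internally, which would clobber $@.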
+ my $error=$@;
+ chomp $error;
$ret="[[!$command ".
- gettext("Error").": $@"."]]";
+ gettext("Error").": $error"."]]";
}
}
else {
@@ -1244,7 +1296,7 @@ sub preprocess ($$$;$$) {
|
"[^"]+" # single-quoted value
|
- [^\s\]]+ # unquoted value
+ [^"\s\]]+ # unquoted value
)
\s* # whitespace or end
# of directive
@@ -1267,7 +1319,7 @@ sub preprocess ($$$;$$) {
|
"[^"]+" # single-quoted value
|
- [^\s\]]+ # unquoted value
+ [^"\s\]]+ # unquoted value
)
\s* # whitespace or end
# of directive
@@ -1669,12 +1721,6 @@ sub rcs_receive () {
$hooks{rcs}{rcs_receive}{call}->();
}
-sub safequote ($) {
- my $s=shift;
- $s=~s/[{}]//g;
- return "q{$s}";
-}
-
sub add_depends ($$) {
my $page=shift;
my $pagespec=shift;
@@ -1701,29 +1747,37 @@ sub file_pruned ($$) {
return $file =~ m/$regexp/ && $file ne $base;
}
-sub gettext {
- # Only use gettext in the rare cases it's needed.
+sub define_gettext () {
+ # If translation is needed, redefine the gettext function to do it.
+ # Otherwise, it becomes a quick no-op.
+ no warnings 'redefine';
if ((exists $ENV{LANG} && length $ENV{LANG}) ||
(exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
(exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
- if (! $gettext_obj) {
- $gettext_obj=eval q{
+ *gettext=sub {
+ my $gettext_obj=eval q{
use Locale::gettext q{textdomain};
Locale::gettext->domain('ikiwiki')
};
- if ($@) {
- print STDERR "$@";
- $gettext_obj=undef;
+
+ if ($gettext_obj) {
+ $gettext_obj->get(shift);
+ }
+ else {
return shift;
}
- }
- return $gettext_obj->get(shift);
+ };
}
else {
- return shift;
+ *gettext=sub { return shift };
}
}
+sub gettext {
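+	# Stub that is replaced on first use: define_gettext() installs the
+	# real implementation over *gettext, and the call below then runs it.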
+ define_gettext();
+ gettext(@_);
+}
+
sub yesno ($) {
my $val=shift;
@@ -1755,6 +1809,14 @@ sub inject {
use warnings;
}
+sub add_link ($$) {
+ my $page=shift;
+ my $link=shift;
+
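+	# Record that $page contains a link to $link, avoiding duplicate
+	# entries in %links.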
+ push @{$links{$page}}, $link
+ unless grep { $_ eq $link } @{$links{$page}};
+}
+
sub pagespec_merge ($$) {
my $a=shift;
my $b=shift;
@@ -1768,6 +1830,7 @@ sub pagespec_translate ($) {
# Convert spec to perl code.
my $code="";
+ my @data;
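+	# Parameters used by the generated code are stashed in @data and
+	# referenced by index, rather than being quoted into the code string.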
while ($spec=~m{
\s* # ignore whitespace
( # 1: match a single word
@@ -1795,14 +1858,17 @@ sub pagespec_translate ($) {
}
elsif ($word =~ /^(\w+)\((.*)\)$/) {
if (exists $IkiWiki::PageSpec::{"match_$1"}) {
- $code.="IkiWiki::PageSpec::match_$1(\$page, ".safequote($2).", \@_)";
+ push @data, $2;
+ $code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
}
else {
- $code.="IkiWiki::FailReason->new(".safequote(qq{unknown function in pagespec "$word"}).")";
+ push @data, qq{unknown function in pagespec "$word"};
+ $code.="IkiWiki::ErrorReason->new(\$data[$#data])";
}
}
else {
- $code.=" IkiWiki::PageSpec::match_glob(\$page, ".safequote($word).", \@_)";
+ push @data, $word;
+ $code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
}
}
@@ -1825,11 +1891,35 @@ sub pagespec_match ($$;@) {
}
my $sub=pagespec_translate($spec);
- return IkiWiki::FailReason->new("syntax error in pagespec \"$spec\"")
+ return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
if $@ || ! defined $sub;
return $sub->($page, @params);
}
+sub pagespec_match_list ($$;@) {
+ my $pages=shift;
+ my $spec=shift;
+ my @params=@_;
+
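+	# Given a reference to a list of pages and a pagespec, return the
+	# pages that match. Calls error() on a pagespec syntax error, or if
+	# nothing matched and the last failure was an ErrorReason.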
+ my $sub=pagespec_translate($spec);
+ error "syntax error in pagespec \"$spec\""
+ if $@ || ! defined $sub;
+
+ my @ret;
+ my $r;
+ foreach my $page (@$pages) {
+ $r=$sub->($page, @params);
+ push @ret, $page if $r;
+ }
+
+ if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
+ error(sprintf(gettext("cannot match pages: %s"), $r));
+ }
+ else {
+ return @ret;
+ }
+}
+
sub pagespec_valid ($) {
my $spec=shift;
@@ -1859,6 +1949,10 @@ sub new {
return bless \$value, $class;
}
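+# An ErrorReason is a FailReason that indicates the match could not be
+# attempted at all (e.g. unknown pagespec function, nonexistent page),
+# rather than the page merely failing to match.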
+package IkiWiki::ErrorReason;
+
+our @ISA = 'IkiWiki::FailReason';
+
package IkiWiki::SuccessReason;
use overload (
@@ -1962,7 +2056,7 @@ sub match_created_before ($$;@) {
}
}
else {
- return IkiWiki::FailReason->new("$testpage has no ctime");
+ return IkiWiki::ErrorReason->new("$testpage does not exist");
}
}
@@ -1982,7 +2076,7 @@ sub match_created_after ($$;@) {
}
}
else {
- return IkiWiki::FailReason->new("$testpage has no ctime");
+ return IkiWiki::ErrorReason->new("$testpage does not exist");
}
}
@@ -2019,7 +2113,7 @@ sub match_user ($$;@) {
my %params=@_;
if (! exists $params{user}) {
- return IkiWiki::FailReason->new("no user specified");
+ return IkiWiki::ErrorReason->new("no user specified");
}
if (defined $params{user} && lc $params{user} eq lc $user) {
@@ -2039,7 +2133,7 @@ sub match_admin ($$;@) {
my %params=@_;
if (! exists $params{user}) {
- return IkiWiki::FailReason->new("no user specified");
+ return IkiWiki::ErrorReason->new("no user specified");
}
if (defined $params{user} && IkiWiki::is_admin($params{user})) {
@@ -2059,7 +2153,7 @@ sub match_ip ($$;@) {
my %params=@_;
if (! exists $params{ip}) {
- return IkiWiki::FailReason->new("no IP specified");
+ return IkiWiki::ErrorReason->new("no IP specified");
}
if (defined $params{ip} && lc $params{ip} eq lc $ip) {