diff --git a/IkiWiki.pm b/IkiWiki.pm
index d0cfb2d57..35fee1aa7 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -14,19 +14,19 @@ use open qw{:utf8 :std};
use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
%pagestate %wikistate %renderedfiles %oldrenderedfiles
%pagesources %destsources %depends %hooks %forcerebuild
- $gettext_obj %loaded_plugins};
+ %loaded_plugins};
use Exporter q{import};
our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
- bestlink htmllink readfile writefile pagetype srcfile pagename
- displaytime will_render gettext urlto targetpage
- add_underlay pagetitle titlepage linkpage newpagefile
- inject
+ pagespec_match_list bestlink htmllink readfile writefile
+ pagetype srcfile pagename displaytime will_render gettext urlto
+ targetpage add_underlay pagetitle titlepage linkpage
+ newpagefile inject add_link
%config %links %pagestate %wikistate %renderedfiles
%pagesources %destsources);
our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir=''; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
+our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
# Optimisation.
use Memoize;
@@ -157,6 +157,13 @@ sub getsetup () {
safe => 0, # path
rebuild => 0,
},
+ underlaydirbase => {
+ type => "internal",
+ default => "$installdir/share/ikiwiki",
+ description => "parent directory containing additional underlays",
+ safe => 0,
+ rebuild => 0,
+ },
wrappers => {
type => "internal",
default => [],
@@ -213,6 +220,13 @@ sub getsetup () {
safe => 1,
rebuild => 1,
},
+ discussionpage => {
+ type => "string",
+ default => gettext("Discussion"),
+ description => "name of Discussion pages",
+ safe => 1,
+ rebuild => 1,
+ },
sslcookie => {
type => "boolean",
default => 0,
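
The discussionpage option added above makes the name of Discussion pages configurable (and translatable, since its default is run through gettext). A minimal sketch of how it might be set in a wiki's setup file; the wiki name and the Portuguese page name are hypothetical examples:

    use IkiWiki::Setup::Standard {
        wikiname => "MyWiki",              # hypothetical wiki
        # rename Discussion pages, e.g. for a Portuguese-language wiki
        discussionpage => "Discussão",
        # ... remaining options unchanged ...
    };
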
@@ -452,7 +466,7 @@ sub checkconfig () {
if (defined $config{locale}) {
if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
$ENV{LANG}=$config{locale};
- $gettext_obj=undef;
+ define_gettext();
}
}
@@ -715,7 +729,7 @@ sub add_underlay ($) {
my $dir=shift;
if ($dir !~ /^\//) {
- $dir="$config{underlaydir}/../$dir";
+ $dir="$config{underlaydirbase}/$dir";
}
if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
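
The underlaydirbase option declared earlier replaces the old habit of deriving extra underlay locations from underlaydir, so add_underlay() now resolves relative names against it. A small self-contained sketch of the resulting path computation, assuming the stock /usr prefix; the relative underlay name is just an example:

    # illustrative only, using the defaults shown in getsetup() above
    my %config = ( underlaydirbase => "/usr/share/ikiwiki" );
    my $dir = "javascript";                  # example relative underlay
    $dir = "$config{underlaydirbase}/$dir" if $dir !~ /^\//;
    print "$dir\n";    # /usr/share/ikiwiki/javascript (was underlaydir/../javascript)
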
@@ -1056,6 +1070,41 @@ sub htmllink ($$$;@) {
return "$linktext";
}
+sub openiduser ($) {
+ my $user=shift;
+
+ if ($user =~ m!^https?://! &&
+ eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
+ my $display;
+
+ if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
+ # this works in at least 2.x
+ $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
+ }
+ else {
+ # this only works in 1.x
+ my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
+ $display=$oid->display;
+ }
+
+ # Convert "user.somehost.com" to "user [somehost.com]"
+ # (also "user.somehost.co.uk")
+ if ($display !~ /\[/) {
+ $display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
+ }
+ # Convert "http://somehost.com/user" to "user [somehost.com]".
+ # (also "https://somehost.com/user/")
+ if ($display !~ /\[/) {
+ $display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/;
+ }
+ $display=~s!^https?://!!; # make sure this is removed
+ eval q{use CGI 'escapeHTML'};
+ error($@) if $@;
+ return escapeHTML($display);
+ }
+ return;
+}
+
sub userlink ($) {
my $user=shift;
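
The new openiduser() helper reduces an OpenID identity URL to a short display name. A standalone sketch of just the fallback tidying rules; the two identities are made-up examples, and in the function above the initial display string normally comes from Net::OpenID::VerifiedIdentity first:

    use strict;
    use warnings;

    for my $id ('joeyh.example.com', 'http://example.com/someuser/') {
        my $display = $id;
        # "user.somehost.com" -> "user [somehost.com]"
        if ($display !~ /\[/) {
            $display =~ s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
        }
        # "http://somehost.com/user" -> "user [somehost.com]"
        if ($display !~ /\[/) {
            $display =~ s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/;
        }
        $display =~ s!^https?://!!;
        print "$id => $display\n";   # joeyh [example.com], someuser [example.com]
    }
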
@@ -1206,9 +1255,10 @@ sub preprocess ($$$;$$) {
);
};
if ($@) {
- chomp $@;
+ my $error=$@;
+ chomp $error;
$ret="[[!$command ".
- gettext("Error").": $@"."]]";
+ gettext("Error").": $error"."]]";
}
}
else {
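
Copying $@ into a lexical before touching it matters because any later eval resets $@; in the old code the gettext("Error") call could run an eval while the message was still being assembled, blanking the error text. A minimal demonstration of the underlying Perl pitfall:

    use strict;
    use warnings;

    eval { die "directive failed\n" };
    my $error = $@;        # copy first, as the patch now does
    chomp $error;
    eval q{ 1 };           # any later successful eval (e.g. inside gettext) clears $@
    print "\$\@ is now: '$@'\n";       # empty string
    print "saved copy: '$error'\n";    # still "directive failed"
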
@@ -1246,7 +1296,7 @@ sub preprocess ($$$;$$) {
|
"[^"]+" # single-quoted value
|
- [^\s\]]+ # unquoted value
+ [^"\s\]]+ # unquoted value
)
\s* # whitespace or end
# of directive
@@ -1269,7 +1319,7 @@ sub preprocess ($$$;$$) {
|
"[^"]+" # single-quoted value
|
- [^\s\]]+ # unquoted value
+ [^"\s\]]+ # unquoted value
)
\s* # whitespace or end
# of directive
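
This hunk repeats, for the second copy of the directive-parameter grammar, the same one-character tightening made just above: an unquoted value may no longer contain a double quote, so a stray quote ends the value instead of being swallowed into it. A tiny self-contained illustration, using a made-up parameter string:

    use strict;
    use warnings;

    my $params = q{size=small"big" align=left};
    my ($old) = $params =~ /size=([^\s\]]+)/;     # old character class
    my ($new) = $params =~ /size=([^"\s\]]+)/;    # new character class
    print "old: $old\n";   # old: small"big"
    print "new: $new\n";   # new: small
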
@@ -1457,8 +1507,13 @@ sub loadindex () {
$links{$page}=$d->{links};
$oldlinks{$page}=[@{$d->{links}}];
}
- if (exists $d->{depends}) {
- $depends{$page}=$d->{depends};
+ if (exists $d->{dependslist}) {
+ $depends{$page}={
+ map { $_ => 1 } @{$d->{dependslist}}
+ };
+ }
+ elsif (exists $d->{depends}) {
+ $depends{$page}={$d->{depends} => 1};
}
if (exists $d->{state}) {
$pagestate{$page}=$d->{state};
@@ -1504,7 +1559,7 @@ sub saveindex () {
};
if (exists $depends{$page}) {
- $index{page}{$src}{depends} = $depends{$page};
+ $index{page}{$src}{dependslist} = [ keys %{$depends{$page}} ];
}
if (exists $pagestate{$page}) {
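
Taken together, the loadindex() and saveindex() changes switch page dependencies from one merged pagespec string to a dependslist of separate pagespecs, loaded back into a hash, while the old single-string form is still read so existing indexes keep working. A small standalone sketch of the two index formats; the page name and pagespecs are hypothetical:

    use strict;
    use warnings;

    my %depends;
    my %new_entry = ( dependslist => [ 'blog/*', 'news/*' ] );   # new index format
    my %old_entry = ( depends => 'blog/* or news/*' );           # pre-upgrade format

    for my $d (\%new_entry, \%old_entry) {
        if (exists $d->{dependslist}) {
            $depends{foo} = { map { $_ => 1 } @{$d->{dependslist}} };
        }
        elsif (exists $d->{depends}) {
            $depends{foo} = { $d->{depends} => 1 };
        }
        print join(", ", sort keys %{$depends{foo}}), "\n";
    }
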
@@ -1671,25 +1726,22 @@ sub rcs_receive () {
$hooks{rcs}{rcs_receive}{call}->();
}
-sub safequote ($) {
- my $s=shift;
- $s=~s/[{}]//g;
- return "q{$s}";
-}
-
sub add_depends ($$) {
my $page=shift;
my $pagespec=shift;
-
- return unless pagespec_valid($pagespec);
- if (! exists $depends{$page}) {
- $depends{$page}=$pagespec;
- }
- else {
- $depends{$page}=pagespec_merge($depends{$page}, $pagespec);
+ if (ref $pagespec eq 'ARRAY') {
+ foreach my $ps (@$pagespec) {
+ if (pagespec_valid($ps)) {
+ $depends{$page}{$ps} = 1;
+ }
+ }
+ return;
}
+ return unless pagespec_valid($pagespec);
+
+ $depends{$page}{$pagespec} = 1;
return 1;
}
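
With safequote() gone and %depends now holding a hash of pagespecs per page, add_depends() also accepts a reference to a list of pagespecs and records each one separately instead of merging them. A hypothetical usage sketch, assuming IkiWiki.pm is installed and loadable; the page name and pagespecs are examples only:

    use strict;
    use warnings;
    use IkiWiki;

    add_depends("index", "blog/*");
    add_depends("index", [ "news/*", "sidebar" ]);

    # %IkiWiki::depends now holds roughly:
    #   index => { 'blog/*' => 1, 'news/*' => 1, 'sidebar' => 1 }
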
@@ -1703,29 +1755,37 @@ sub file_pruned ($$) {
return $file =~ m/$regexp/ && $file ne $base;
}
-sub gettext {
- # Only use gettext in the rare cases it's needed.
+sub define_gettext () {
+ # If translation is needed, redefine the gettext function to do it.
+ # Otherwise, it becomes a quick no-op.
+ no warnings 'redefine';
if ((exists $ENV{LANG} && length $ENV{LANG}) ||
(exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
(exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
- if (! $gettext_obj) {
- $gettext_obj=eval q{
+ *gettext=sub {
+ my $gettext_obj=eval q{
use Locale::gettext q{textdomain};
Locale::gettext->domain('ikiwiki')
};
- if ($@) {
- print STDERR "$@";
- $gettext_obj=undef;
+
+ if ($gettext_obj) {
+ $gettext_obj->get(shift);
+ }
+ else {
return shift;
}
- }
- return $gettext_obj->get(shift);
+ };
}
else {
- return shift;
+ *gettext=sub { return shift };
}
}
+sub gettext {
+ define_gettext();
+ gettext(@_);
+}
+
sub yesno ($) {
my $val=shift;
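
The rewritten gettext support does the LANG/LC_ALL/LC_MESSAGES check once and then installs the real implementation into the symbol table, replacing the small gettext stub defined in this hunk, so later calls go straight to the right code. A standalone sketch of the same redefine-on-first-call idiom; the function name and the SHOUT environment variable are made up for the illustration:

    use strict;
    use warnings;

    sub slow {
        # work out the right implementation once...
        my $impl = $ENV{SHOUT} ? sub { uc shift } : sub { shift };
        # ...then overwrite ourselves in the symbol table, as define_gettext() does
        no warnings 'redefine';
        *slow = $impl;
        # re-dispatch so the very first call also gets the real behaviour
        return slow(@_);
    }

    print slow("hello"), "\n";   # first call: set-up happens here
    print slow("again"), "\n";   # later calls skip it entirely
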
@@ -1757,6 +1817,14 @@ sub inject {
use warnings;
}
+sub add_link ($$) {
+ my $page=shift;
+ my $link=shift;
+
+ push @{$links{$page}}, $link
+ unless grep { $_ eq $link } @{$links{$page}};
+}
+
sub pagespec_merge ($$) {
my $a=shift;
my $b=shift;
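
add_link() gives plugins one place to record a link and avoids duplicate entries, instead of each caller pushing onto %links by hand. A hypothetical usage sketch, assuming IkiWiki.pm is installed and loadable; the page and link target are examples:

    use strict;
    use warnings;
    use IkiWiki;

    add_link("blog/mypost", "tags/perl");
    add_link("blog/mypost", "tags/perl");    # duplicate, silently ignored

    print scalar @{$links{"blog/mypost"}}, "\n";   # 1
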
@@ -1770,6 +1838,7 @@ sub pagespec_translate ($) {
# Convert spec to perl code.
my $code="";
+ my @data;
while ($spec=~m{
\s* # ignore whitespace
( # 1: match a single word
@@ -1797,14 +1866,17 @@ sub pagespec_translate ($) {
}
elsif ($word =~ /^(\w+)\((.*)\)$/) {
if (exists $IkiWiki::PageSpec::{"match_$1"}) {
- $code.="IkiWiki::PageSpec::match_$1(\$page, ".safequote($2).", \@_)";
+ push @data, $2;
+ $code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
}
else {
- $code.="IkiWiki::FailReason->new(".safequote(qq{unknown function in pagespec "$word"}).")";
+ push @data, qq{unknown function in pagespec "$word"};
+ $code.="IkiWiki::ErrorReason->new(\$data[$#data])";
}
}
else {
- $code.=" IkiWiki::PageSpec::match_glob(\$page, ".safequote($word).", \@_)";
+ push @data, $word;
+ $code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
}
}
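
pagespec_translate() no longer interpolates user-supplied strings into the generated code (which is why safequote() could be dropped); it pushes them onto the lexical @data array and the generated code refers to them by index, so the eval'd sub closes over @data and no quoting or escaping is needed. A standalone sketch of that closure technique with a toy match_glob; the names and the example glob are made up:

    use strict;
    use warnings;

    # toy stand-in for IkiWiki::PageSpec::match_glob, just for this sketch
    sub match_glob {
        my ($page, $glob) = @_;
        $glob =~ s/\*/.*/g;               # the real code also escapes metacharacters
        return $page =~ /^$glob$/;
    }

    my @data;
    my $code = 'sub { my $page = shift; ';
    push @data, 'post/*';                 # user input goes into @data, never into $code
    $code .= "match_glob(\$page, \$data[$#data]) }";

    my $sub = eval $code;                 # the generated sub closes over the lexical @data
    die $@ if $@;
    print $sub->('post/hello') ? "matches\n" : "no match\n";
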
@@ -1827,11 +1899,35 @@ sub pagespec_match ($$;@) {
}
my $sub=pagespec_translate($spec);
- return IkiWiki::FailReason->new("syntax error in pagespec \"$spec\"")
+ return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
if $@ || ! defined $sub;
return $sub->($page, @params);
}
+sub pagespec_match_list ($$;@) {
+ my $pages=shift;
+ my $spec=shift;
+ my @params=@_;
+
+ my $sub=pagespec_translate($spec);
+ error "syntax error in pagespec \"$spec\""
+ if $@ || ! defined $sub;
+
+ my @ret;
+ my $r;
+ foreach my $page (@$pages) {
+ $r=$sub->($page, @params);
+ push @ret, $page if $r;
+ }
+
+ if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
+ error(sprintf(gettext("cannot match pages: %s"), $r));
+ }
+ else {
+ return @ret;
+ }
+}
+
sub pagespec_valid ($) {
my $spec=shift;
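
pagespec_match_list() is the new list-filtering entry point (also added to @EXPORT at the top of this patch): it matches many pages against one pagespec and calls error() instead of silently matching nothing when the pagespec itself cannot be evaluated. A hypothetical plugin-side usage sketch, assuming IkiWiki.pm is loadable and the wiki's pages have already been scanned; the pagespec and location are examples:

    use strict;
    use warnings;
    use IkiWiki;

    my @posts = pagespec_match_list(
        [ keys %pagesources ],               # candidate pages
        "post/* and !post/*/Discussion",     # example pagespec
        location => "index",                 # page the spec is evaluated relative to
    );
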
@@ -1861,6 +1957,10 @@ sub new {
return bless \$value, $class;
}
+package IkiWiki::ErrorReason;
+
+our @ISA = 'IkiWiki::FailReason';
+
package IkiWiki::SuccessReason;
use overload (
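
IkiWiki::ErrorReason is a thin subclass of IkiWiki::FailReason, used by the match functions below to distinguish "this page does not match" from "this pagespec cannot be evaluated at all", which is exactly what pagespec_match_list() tests with isa(). A short sketch, assuming IkiWiki.pm is loadable; the reason strings are examples:

    use strict;
    use warnings;
    use IkiWiki;

    my $fail  = IkiWiki::FailReason->new("glob did not match");
    my $error = IkiWiki::ErrorReason->new("no user specified");

    print $error->isa("IkiWiki::FailReason") ? "error is also a failure\n" : "";
    print $fail->isa("IkiWiki::ErrorReason") ? "" : "plain failure, not an error\n";
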
@@ -1964,7 +2064,7 @@ sub match_created_before ($$;@) {
}
}
else {
- return IkiWiki::FailReason->new("$testpage has no ctime");
+ return IkiWiki::ErrorReason->new("$testpage does not exist");
}
}
@@ -1984,7 +2084,7 @@ sub match_created_after ($$;@) {
}
}
else {
- return IkiWiki::FailReason->new("$testpage has no ctime");
+ return IkiWiki::ErrorReason->new("$testpage does not exist");
}
}
@@ -2021,7 +2121,7 @@ sub match_user ($$;@) {
my %params=@_;
if (! exists $params{user}) {
- return IkiWiki::FailReason->new("no user specified");
+ return IkiWiki::ErrorReason->new("no user specified");
}
if (defined $params{user} && lc $params{user} eq lc $user) {
@@ -2041,7 +2141,7 @@ sub match_admin ($$;@) {
my %params=@_;
if (! exists $params{user}) {
- return IkiWiki::FailReason->new("no user specified");
+ return IkiWiki::ErrorReason->new("no user specified");
}
if (defined $params{user} && IkiWiki::is_admin($params{user})) {
@@ -2061,7 +2161,7 @@ sub match_ip ($$;@) {
my %params=@_;
if (! exists $params{ip}) {
- return IkiWiki::FailReason->new("no IP specified");
+ return IkiWiki::ErrorReason->new("no IP specified");
}
if (defined $params{ip} && lc $params{ip} eq lc $ip) {