X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/ef69a6cce5a09e9972441b034e24076027fbc479..126952bd339bac590beae2324ff0f399883bc256:/IkiWiki.pm

diff --git a/IkiWiki.pm b/IkiWiki.pm
index 63da5d0dd..a11b330f2 100644
--- a/IkiWiki.pm
+++ b/IkiWiki.pm
@@ -14,19 +14,19 @@ use open qw{:utf8 :std};
 use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase
 	%pagestate %wikistate %renderedfiles %oldrenderedfiles
 	%pagesources %destsources %depends %hooks %forcerebuild
-	$gettext_obj %loaded_plugins};
+	%loaded_plugins};
 
 use Exporter q{import};
 our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
-	bestlink htmllink readfile writefile pagetype srcfile pagename
-	displaytime will_render gettext urlto targetpage
-	add_underlay pagetitle titlepage linkpage newpagefile
-	inject
+	pagespec_match_list bestlink htmllink readfile writefile
+	pagetype srcfile pagename displaytime will_render gettext urlto
+	targetpage add_underlay pagetitle titlepage linkpage
+	newpagefile inject add_link
 	%config %links %pagestate %wikistate %renderedfiles
 	%pagesources %destsources);
 our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
 our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-our $installdir=''; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
+our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
 
 # Optimisation.
 use Memoize;
@@ -157,6 +157,13 @@ sub getsetup () {
 		safe => 0, # path
 		rebuild => 0,
 	},
+	underlaydirbase => {
+		type => "internal",
+		default => "$installdir/share/ikiwiki",
+		description => "parent directory containing additional underlays",
+		safe => 0,
+		rebuild => 0,
+	},
 	wrappers => {
 		type => "internal",
 		default => [],
@@ -321,7 +328,7 @@ sub getsetup () {
 		default => [qr/(^|\/)\.\.(\/|$)/, qr/^\./, qr/\/\./,
 			qr/\.x?html?$/, qr/\.ikiwiki-new$/,
 			qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//,
-			qr/(^|\/)_MTN\//,
+			qr/(^|\/)_MTN\//, qr/(^|\/)_darcs\//,
 			qr/\.dpkg-tmp$/],
 		description => "regexps of source files to ignore",
 		safe => 0,
@@ -452,7 +459,7 @@ sub checkconfig () {
 	if (defined $config{locale}) {
 		if (POSIX::setlocale(&POSIX::LC_ALL, $config{locale})) {
 			$ENV{LANG}=$config{locale};
-			$gettext_obj=undef;
+			define_gettext();
 		}
 	}
 
@@ -715,7 +722,7 @@ sub add_underlay ($) {
 	my $dir=shift;
 
 	if ($dir !~ /^\//) {
-		$dir="$config{underlaydir}/../$dir";
+		$dir="$config{underlaydirbase}/$dir";
 	}
 	
 	if (! grep { $_ eq $dir } @{$config{underlaydirs}}) {
@@ -1056,6 +1063,41 @@ sub htmllink ($$$;@) {
 	return "<a href=\"$bestlink\"@attrs>$linktext</a>";
 }
 
+sub openiduser ($) {
+	my $user=shift;
+
+	if ($user =~ m!^https?://! &&
+	    eval q{use Net::OpenID::VerifiedIdentity; 1} && !$@) {
+		my $display;
+
+		if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) {
+			# this works in at least 2.x
+			$display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user);
+		}
+		else {
+			# this only works in 1.x
+			my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user);
+			$display=$oid->display;
+		}
+
+		# Convert "user.somehost.com" to "user [somehost.com]"
+		# (also "user.somehost.co.uk")
+		if ($display !~ /\[/) {
+			$display=~s/^([-a-zA-Z0-9]+?)\.([-.a-zA-Z0-9]+\.[a-z]+)$/$1 [$2]/;
+		}
+		# Convert "http://somehost.com/user" to "user [somehost.com]".
+		# (also "https://somehost.com/user/")
+		if ($display !~ /\[/) {
+			$display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/;
+		}
+		$display=~s!^https?://!!; # make sure this is removed
+		eval q{use CGI 'escapeHTML'};
+		error($@) if $@;
+		return escapeHTML($display);
+	}
+	return;
+}
+
 sub userlink ($) {
 	my $user=shift;
 
@@ -1246,7 +1288,7 @@ sub preprocess ($$$;$$) {
 					|
 					"[^"]+"		# single-quoted value
 					|
-					[^\s\]]+	# unquoted value
+					[^"\s\]]+	# unquoted value
 				)
 				\s*		# whitespace or end
 						# of directive
@@ -1269,7 +1311,7 @@ sub preprocess ($$$;$$) {
 					|
 					"[^"]+"		# single-quoted value
 					|
-					[^\s\]]+	# unquoted value
+					[^"\s\]]+	# unquoted value
 				)
 				\s*		# whitespace or end
 						# of directive
@@ -1671,12 +1713,6 @@ sub rcs_receive () {
 	$hooks{rcs}{rcs_receive}{call}->();
 }
 
-sub safequote ($) {
-	my $s=shift;
-	$s=~s/[{}]//g;
-	return "q{$s}";
-}
-
 sub add_depends ($$) {
 	my $page=shift;
 	my $pagespec=shift;
@@ -1703,29 +1739,37 @@ sub file_pruned ($$) {
 	return $file =~ m/$regexp/ && $file ne $base;
 }
 
-sub gettext {
-	# Only use gettext in the rare cases it's needed.
+sub define_gettext () {
+	# If translation is needed, redefine the gettext function to do it.
+	# Otherwise, it becomes a quick no-op.
+	no warnings 'redefine';
 	if ((exists $ENV{LANG} && length $ENV{LANG}) ||
 	    (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
 	    (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
-		if (! $gettext_obj) {
-			$gettext_obj=eval q{
+		*gettext=sub {
+			my $gettext_obj=eval q{
 				use Locale::gettext q{textdomain};
 				Locale::gettext->domain('ikiwiki')
 			};
-			if ($@) {
-				print STDERR "$@";
-				$gettext_obj=undef;
+
+			if ($gettext_obj) {
+				$gettext_obj->get(shift);
+			}
+			else {
 				return shift;
 			}
-		}
-		return $gettext_obj->get(shift);
+		};
 	}
 	else {
-		return shift;
+		*gettext=sub { return shift };
 	}
 }
 
+sub gettext {
+	define_gettext();
+	gettext(@_);
+}
+
 sub yesno ($) {
 	my $val=shift;
 
@@ -1757,6 +1801,14 @@ sub inject {
 	use warnings;
 }
 
+sub add_link ($$) {
+	my $page=shift;
+	my $link=shift;
+
+	push @{$links{$page}}, $link
+		unless grep { $_ eq $link } @{$links{$page}};
+}
+
 sub pagespec_merge ($$) {
 	my $a=shift;
 	my $b=shift;
@@ -1770,6 +1822,7 @@ sub pagespec_translate ($) {
 
 	# Convert spec to perl code.
 	my $code="";
+	my @data;
 	while ($spec=~m{
 		\s*		# ignore whitespace
 		(		# 1: match a single word
@@ -1797,14 +1850,17 @@ sub pagespec_translate ($) {
 		}
 		elsif ($word =~ /^(\w+)\((.*)\)$/) {
 			if (exists $IkiWiki::PageSpec::{"match_$1"}) {
-				$code.="IkiWiki::PageSpec::match_$1(\$page, ".safequote($2).", \@_)";
+				push @data, $2;
+				$code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
 			}
 			else {
-				$code.="IkiWiki::FailReason->new(".safequote(qq{unknown function in pagespec "$word"}).")";
+				push @data, qq{unknown function in pagespec "$word"};
+				$code.="IkiWiki::ErrorReason->new(\$data[$#data])";
 			}
 		}
 		else {
-			$code.=" IkiWiki::PageSpec::match_glob(\$page, ".safequote($word).", \@_)";
+			push @data, $word;
+			$code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
 		}
 	}
 
@@ -1827,11 +1883,35 @@ sub pagespec_match ($$;@) {
 	}
 
 	my $sub=pagespec_translate($spec);
-	return IkiWiki::FailReason->new("syntax error in pagespec \"$spec\"")
+	return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
 		if $@ || ! defined $sub;
 	return $sub->($page, @params);
 }
 
+sub pagespec_match_list ($$;@) {
+	my $pages=shift;
+	my $spec=shift;
+	my @params=@_;
+
+	my $sub=pagespec_translate($spec);
+	error "syntax error in pagespec \"$spec\""
+		if $@ || ! defined $sub;
+
+	my @ret;
+	my $r;
+	foreach my $page (@$pages) {
+		$r=$sub->($page, @params);
+		push @ret, $page if $r;
+	}
+
+	if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
+		error(sprintf(gettext("cannot match pages: %s"), $r));
+	}
+	else {
+		return @ret;
+	}
+}
+
 sub pagespec_valid ($) {
 	my $spec=shift;
 
@@ -1861,6 +1941,10 @@ sub new {
 	return bless \$value, $class;
 }
 
+package IkiWiki::ErrorReason;
+
+our @ISA = 'IkiWiki::FailReason';
+
 package IkiWiki::SuccessReason;
 
 use overload (
@@ -2021,7 +2105,7 @@ sub match_user ($$;@) {
 	my %params=@_;
 	
 	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
 	}
 
 	if (defined $params{user} && lc $params{user} eq lc $user) {
@@ -2041,7 +2125,7 @@ sub match_admin ($$;@) {
 	my %params=@_;
 	
 	if (! exists $params{user}) {
-		return IkiWiki::FailReason->new("no user specified");
+		return IkiWiki::ErrorReason->new("no user specified");
 	}
 
 	if (defined $params{user} && IkiWiki::is_admin($params{user})) {
@@ -2061,7 +2145,7 @@ sub match_ip ($$;@) {
 	my %params=@_;
 	
 	if (! exists $params{ip}) {
-		return IkiWiki::FailReason->new("no IP specified");
+		return IkiWiki::ErrorReason->new("no IP specified");
 	}
 
 	if (defined $params{ip} && lc $params{ip} eq lc $ip) {
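
Below is a minimal usage sketch, not part of the patch above, of the two new exports it introduces, pagespec_match_list() and add_link(). The plugin sub name, the page names, and the "blog/*" pagespec are invented for illustration.

	use IkiWiki 3.00;

	sub demo_update_links {
		# Hypothetical list of candidate pages; a real plugin would
		# get these from %pagesources or a hook parameter.
		my @candidates = ("blog/one", "blog/two", "sandbox");

		# pagespec_match_list() takes a reference to a list of pages
		# and a pagespec; it returns the pages that match, and calls
		# error() if nothing matched because the spec itself failed
		# (an IkiWiki::ErrorReason, e.g. an unknown function).
		my @matches = pagespec_match_list(\@candidates, "blog/*");

		# add_link() records a link from a page to each match,
		# skipping links already present in %links.
		add_link("index", $_) foreach @matches;
	}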