+sub hook (@) {
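+ # Registers a plugin hook. Requires type, id, and call (a coderef)
+ # named parameters; the hook is stored in %hooks for run_hooks to
+ # dispatch later. If no_override is set, an already registered hook
+ # with the same type and id is left in place.
+ # Example: hook(type => "preprocess", id => "foo", call => \&preprocess);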
+ my %param=@_;
+
+ if (! exists $param{type} || ! ref $param{call} || ! exists $param{id}) {
+ error 'hook requires type, call, and id parameters';
+ }
+
+ return if $param{no_override} && exists $hooks{$param{type}}{$param{id}};
+
+ $hooks{$param{type}}{$param{id}}=\%param;
+ return 1;
+}
+
+sub run_hooks ($$) {
+ # Calls the given sub for each hook of the given type,
+ # passing it the hook function to call.
+ my $type=shift;
+ my $sub=shift;
+
+ if (exists $hooks{$type}) {
+ my (@first, @middle, @last);
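+ # Hooks flagged "first" run before unflagged ones; hooks flagged
+ # "last" run after them.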
+ foreach my $id (keys %{$hooks{$type}}) {
+ if ($hooks{$type}{$id}{first}) {
+ push @first, $id;
+ }
+ elsif ($hooks{$type}{$id}{last}) {
+ push @last, $id;
+ }
+ else {
+ push @middle, $id;
+ }
+ }
+ foreach my $id (@first, @middle, @last) {
+ $sub->($hooks{$type}{$id}{call});
+ }
+ }
+
+ return 1;
+}
+
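+# The rcs_* functions below are thin wrappers that dispatch to the
+# hooks registered by the configured revision control plugin.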
+sub rcs_update () {
+ $hooks{rcs}{rcs_update}{call}->(@_);
+}
+
+sub rcs_prepedit ($) {
+ $hooks{rcs}{rcs_prepedit}{call}->(@_);
+}
+
+sub rcs_commit ($$$;$$) {
+ $hooks{rcs}{rcs_commit}{call}->(@_);
+}
+
+sub rcs_commit_staged ($$$) {
+ $hooks{rcs}{rcs_commit_staged}{call}->(@_);
+}
+
+sub rcs_add ($) {
+ $hooks{rcs}{rcs_add}{call}->(@_);
+}
+
+sub rcs_remove ($) {
+ $hooks{rcs}{rcs_remove}{call}->(@_);
+}
+
+sub rcs_rename ($$) {
+ $hooks{rcs}{rcs_rename}{call}->(@_);
+}
+
+sub rcs_recentchanges ($) {
+ $hooks{rcs}{rcs_recentchanges}{call}->(@_);
+}
+
+sub rcs_diff ($) {
+ $hooks{rcs}{rcs_diff}{call}->(@_);
+}
+
+sub rcs_getctime ($) {
+ $hooks{rcs}{rcs_getctime}{call}->(@_);
+}
+
+sub rcs_receive () {
+ $hooks{rcs}{rcs_receive}{call}->();
+}
+
+sub add_depends ($$;@) {
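+ # Records that $page depends on pages matching $pagespec. Optional
+ # presence/links named parameters request a weaker dependency type
+ # when the pagespec allows it; otherwise a full content dependency
+ # is registered.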
+ my $page=shift;
+ my $pagespec=shift;
+
+ # Is the pagespec a simple page name?
+ my $simple=$pagespec =~ /$config{wiki_file_regexp}/ &&
+ $pagespec !~ /[\s*?()!]/;
+
+ my $deptype=0;
+ if (@_) {
+ my %params=@_;
+
+ if ($params{presence}) {
+ # Is the pagespec limited to terms that will continue
+ # to match pages as long as those pages exist?
+ my $presence_limited=1;
+ while ($presence_limited && $pagespec=~m/(\w+)\([^\)]*\)/g) {
+ $presence_limited = $1 =~ /^(glob|internal|creation_month|creation_day|creation_year|created_before|created_after)$/;
+ }
+ if ($presence_limited) {
+ $deptype=$deptype | $DEPEND_PRESENCE;
+ }
+ else {
+ $deptype=$deptype | $DEPEND_CONTENT;
+ }
+ }
+ if ($params{links}) {
+ # Is the pagespec limited to terms that will continue
+ # to match pages as long as those pages exist and
+ # link to the same places?
+ my $links_limited=1;
+ while ($links_limited && $pagespec=~m/(\w+)\([^\)]*\)/g) {
+ $links_limited = $1 =~ /^(glob|internal|creation_month|creation_day|creation_year|created_before|created_after|backlink)$/;
+ }
+ if ($links_limited) {
+ $deptype=$deptype | $DEPEND_LINKS;
+ }
+ else {
+ $deptype=$deptype | $DEPEND_CONTENT;
+ }
+ }
+ }
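+ # Neither presence nor links was requested, so default to a full
+ # content dependency.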
+ $deptype=$DEPEND_CONTENT unless $deptype;
+
+ if ($simple) {
+ $depends_simple{$page}{lc $pagespec} |= $deptype;
+ return 1;
+ }
+
+ return unless pagespec_valid($pagespec);
+
+ $depends{$page}{$pagespec} |= $deptype;
+ return 1;
+}
+
+sub file_pruned ($$) {
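+ # Returns true if the given file, relative to the given base
+ # directory, matches one of the configured wiki_file_prune_regexps
+ # (and is not the base directory itself).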
+ require File::Spec;
+ my $file=File::Spec->canonpath(shift);
+ my $base=File::Spec->canonpath(shift);
+ $file =~ s#^\Q$base\E/+##;
+
+ my $regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')';
+ return $file =~ m/$regexp/ && $file ne $base;
+}
+
+sub define_gettext () {
+ # If translation is needed, redefine the gettext function to do it.
+ # Otherwise, it becomes a quick no-op.
+ no warnings 'redefine';
+ if ((exists $ENV{LANG} && length $ENV{LANG}) ||
+ (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) ||
+ (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) {
+ *gettext=sub {
+ my $gettext_obj=eval q{
+ use Locale::gettext q{textdomain};
+ Locale::gettext->domain('ikiwiki')
+ };
+
+ if ($gettext_obj) {
+ $gettext_obj->get(shift);
+ }
+ else {
+ return shift;
+ }
+ };
+ }
+ else {
+ *gettext=sub { return shift };
+ }
+}
+
+sub gettext {
+ define_gettext();
+ gettext(@_);
+}
+
+sub yesno ($) {
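+ # Interprets a yes/no setting; true for "1", "yes", or the
+ # localised translation of "yes".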
+ my $val=shift;
+
+ return (defined $val && (lc($val) eq gettext("yes") || lc($val) eq "yes" || $val eq "1"));
+}
+
+sub inject {
+ # Injects a new function into the symbol table to replace an
+ # exported function.
+ my %params=@_;
+
+ # This is deep ugly perl foo, beware.
+ no strict;
+ no warnings;
+ if (! defined $params{parent}) {
+ $params{parent}='::';
+ $params{old}=\&{$params{name}};
+ $params{name}=~s/.*:://;
+ }
+ my $parent=$params{parent};
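+ # Walk the symbol table recursively, replacing every alias of the
+ # old function with the new one.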
+ foreach my $ns (grep /^\w+::/, keys %{$parent}) {
+ $ns = $params{parent} . $ns;
+ inject(%params, parent => $ns) unless $ns eq '::main::';
+ *{$ns . $params{name}} = $params{call}
+ if exists ${$ns}{$params{name}} &&
+ \&{${$ns}{$params{name}}} == $params{old};
+ }
+ use strict;
+ use warnings;
+}
+
+sub add_link ($$) {
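+ # Records a link from $page to $link in %links, skipping duplicates.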
+ my $page=shift;
+ my $link=shift;
+
+ push @{$links{$page}}, $link
+ unless grep { $_ eq $link } @{$links{$page}};
+}
+
+sub pagespec_translate ($) {
+ my $spec=shift;
+
+ # Convert spec to perl code.
+ my $code="";
+ my @data;
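+ # Words matched in the spec are stashed in @data and referenced by
+ # index from the generated code, so they never have to be quoted
+ # into the code string itself.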
+ while ($spec=~m{
+ \s* # ignore whitespace
+ ( # 1: match a single word
+ \! # !
+ |
+ \( # (
+ |
+ \) # )
+ |
+ \w+\([^\)]*\) # command(params)
+ |
+ [^\s()]+ # any other text
+ )
+ \s* # ignore whitespace
+ }gx) {
+ my $word=$1;
+ if (lc $word eq 'and') {
+ $code.=' &&';
+ }
+ elsif (lc $word eq 'or') {
+ $code.=' ||';
+ }
+ elsif ($word eq "(" || $word eq ")" || $word eq "!") {
+ $code.=' '.$word;
+ }
+ elsif ($word =~ /^(\w+)\((.*)\)$/) {
+ if (exists $IkiWiki::PageSpec::{"match_$1"}) {
+ push @data, $2;
+ $code.="IkiWiki::PageSpec::match_$1(\$page, \$data[$#data], \@_)";
+ }
+ else {
+ push @data, qq{unknown function in pagespec "$word"};
+ $code.="IkiWiki::ErrorReason->new(\$data[$#data])";
+ }
+ }
+ else {
+ push @data, $word;
+ $code.=" IkiWiki::PageSpec::match_glob(\$page, \$data[$#data], \@_)";
+ }
+ }
+
+ if (! length $code) {
+ $code="IkiWiki::FailReason->new('empty pagespec')";
+ }
+
+ no warnings;
+ return eval 'sub { my $page=shift; '.$code.' }';
+}
+
+sub pagespec_match ($$;@) {
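+ # Checks whether $page matches $spec. Returns an IkiWiki::SuccessReason
+ # or IkiWiki::FailReason object, which is true or false in boolean
+ # context and stringifies to the reason.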
+ my $page=shift;
+ my $spec=shift;
+ my @params=@_;
+
+ # Backwards compatibility with old calling convention.
+ if (@params == 1) {
+ unshift @params, 'location';
+ }
+
+ my $sub=pagespec_translate($spec);
+ return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"")
+ if $@ || ! defined $sub;
+ return $sub->($page, @params);
+}
+
+sub pagespec_match_list ($$;@) {
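+ # Returns the members of @$pages that match $spec. Errors out if the
+ # spec has a syntax error, or if nothing matched and the last result
+ # was an ErrorReason.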
+ my $pages=shift;
+ my $spec=shift;
+ my @params=@_;
+
+ my $sub=pagespec_translate($spec);
+ error "syntax error in pagespec \"$spec\""
+ if $@ || ! defined $sub;
+
+ my @ret;
+ my $r;
+ foreach my $page (@$pages) {
+ $r=$sub->($page, @params);
+ push @ret, $page if $r;
+ }
+
+ if (! @ret && defined $r && $r->isa("IkiWiki::ErrorReason")) {
+ error(sprintf(gettext("cannot match pages: %s"), $r));
+ }
+ else {
+ return @ret;
+ }
+}
+
+sub pagespec_valid ($) {
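+ # Returns true if the pagespec compiles to perl code without error.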
+ my $spec=shift;
+
+ my $sub=pagespec_translate($spec);
+ return ! $@;
+}
+
+sub glob2re ($) {
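+ # Converts a shell-style glob into a regexp string: * matches any
+ # sequence of characters, ? matches a single character, and
+ # everything else is quoted literally.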
+ my $re=quotemeta(shift);
+ $re=~s/\\\*/.*/g;
+ $re=~s/\\\?/./g;
+ return $re;
+}
+
+package IkiWiki::FailReason;
+
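+# A FailReason stringifies to the reason text, is false in boolean
+# context, and negates to an IkiWiki::SuccessReason.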
+use overload (
+ '""' => sub { ${$_[0]} },
+ '0+' => sub { 0 },
+ '!' => sub { bless $_[0], 'IkiWiki::SuccessReason'},
+ fallback => 1,
+);
+
+sub new {
+ my $class = shift;
+ my $value = shift;
+ return bless \$value, $class;
+}
+
+package IkiWiki::ErrorReason;
+
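+# An ErrorReason is a FailReason indicating that the pagespec could not
+# be evaluated at all (for example, an unknown match function).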
+our @ISA = 'IkiWiki::FailReason';
+
+package IkiWiki::SuccessReason;
+
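+# A SuccessReason stringifies to the reason text, is true in boolean
+# context, and negates to an IkiWiki::FailReason.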
+use overload (
+ '""' => sub { ${$_[0]} },
+ '0+' => sub { 1 },
+ '!' => sub { bless $_[0], 'IkiWiki::FailReason'},
+ fallback => 1,
+);
+
+sub new {
+ my $class = shift;
+ my $value = shift;
+ return bless \$value, $class;
+};
+
+package IkiWiki::PageSpec;
+
+sub derel ($$) {
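+ # Resolves a "./"-relative path against the directory containing
+ # $from; other paths are returned unchanged.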
+ my $path=shift;
+ my $from=shift;
+
+ if ($path =~ m!^\./!) {
+ $from=~s#/?[^/]+$## if defined $from;
+ $path=~s#^\./##;
+ $path="$from/$path" if length $from;
+ }
+
+ return $path;
+}
+
+sub match_glob ($$;@) {