use Exporter q{import};
our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
bestlink htmllink readfile writefile pagetype srcfile pagename
- displaytime
+ displaytime will_render
%config %links %renderedfiles %pagesources);
-our $VERSION = 1.01;
+our $VERSION = 1.01; # plugin interface version
# Optimisation.
use Memoize;
memoize("pagespec_translate");
my $installdir=''; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE
+our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
sub defaultconfig () { #{{{
- wiki_file_prune_regexp => qr{((^|/).svn/|\.\.|^\.|\/\.|\.x?html?$|\.rss$|.arch-ids/|{arch}/)},
+ wiki_file_prune_regexp => qr{((^|/).svn/|\.\.|^\.|\/\.|\.x?html?$|\.rss$|\.atom$|.arch-ids/|{arch}/)},
wiki_link_regexp => qr/\[\[(?:([^\]\|]+)\|)?([^\s\]]+)\]\]/,
wiki_file_regexp => qr/(^[-[:alnum:]_.:\/+]+$)/,
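+	# matches the commit messages ikiwiki itself generates for edits made
+	# via the web, capturing either the wiki user name or the IP address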
+ web_commit_regexp => qr/^web commit (by (.*?(?=: |$))|from (\d+\.\d+\.\d+\.\d+)):?(.*)/,
verbose => 0,
syslog => 0,
wikiname => "wiki",
diffurl => '',
anonok => 0,
rss => 0,
+ atom => 0,
discussion => 1,
rebuild => 0,
refresh => 0,
setup => undef,
adminuser => undef,
adminemail => undef,
- plugin => [qw{mdwn inline htmlscrubber}],
+ plugin => [qw{mdwn inline htmlscrubber passwordauth}],
timeformat => '%c',
locale => undef,
sslcookie => 0,
}
if (defined $config{locale}) {
eval q{use POSIX};
+ error($@) if $@;
$ENV{LANG} = $config{locale}
if POSIX::setlocale(&POSIX::LC_TIME, $config{locale});
}
if ($config{w3mmode}) {
eval q{use Cwd q{abs_path}};
+ error($@) if $@;
$config{srcdir}=possibly_foolish_untaint(abs_path($config{srcdir}));
$config{destdir}=possibly_foolish_untaint(abs_path($config{destdir}));
$config{cgiurl}="file:///\$LIB/ikiwiki-w3m.cgi/".$config{cgiurl}
if ($config{cgi} && ! length $config{url}) {
error("Must specify url to wiki with --url when using --cgi\n");
}
- if ($config{rss} && ! length $config{url}) {
- error("Must specify url to wiki with --url when using --rss\n");
+ if (($config{rss} || $config{atom}) && ! length $config{url}) {
+ error("Must specify url to wiki with --url when using --rss or --atom\n");
}
$config{wikistatedir}="$config{srcdir}/.ikiwiki"
} #}}}
sub loadplugins () { #{{{
- foreach my $plugin (@{$config{plugin}}) {
- my $mod="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
- eval qq{use $mod};
- if ($@) {
- error("Failed to load plugin $mod: $@");
- }
- }
+ loadplugin($_) foreach @{$config{plugin}};
+
run_hooks(getopt => sub { shift->() });
if (grep /^-/, @ARGV) {
print STDERR "Unknown option: $_\n"
}
} #}}}
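+# Load a single plugin module by name; calls error() if it fails to load.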
+sub loadplugin ($) { #{{{
+ my $plugin=shift;
+
+ my $mod="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
+ eval qq{use $mod};
+ if ($@) {
+ error("Failed to load plugin $mod: $@");
+ }
+} #}}}
+
sub error ($) { #{{{
if ($config{cgi}) {
print "Content-type: text/html\n\n";
close OUT;
} #}}}
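+# Tracks pages whose list of rendered files has already been cleared
+# during this run, so later will_render calls append rather than clobber.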
+my %cleared;
sub will_render ($$;$) { #{{{
my $page=shift;
my $dest=shift;
error("$config{destdir}/$dest independently created, not overwriting with version from $page");
}
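+	# The first call with $clear set replaces the page's list of rendered
+	# files; later calls add to it, avoiding duplicate entries.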
- if (! $clear) {
+ if (! $clear || $cleared{$page}) {
$renderedfiles{$page}=[$dest, grep { $_ ne $dest } @{$renderedfiles{$page}}];
}
else {
$renderedfiles{$page}=[$dest];
+ $cleared{$page}=1;
}
} #}}}
my $time=shift;
eval q{use POSIX};
+ error($@) if $@;
# strftime doesn't know about encodings, so make sure
# its output is properly treated as utf8
return decode_utf8(POSIX::strftime(
} #}}}
my %preprocessing;
-sub preprocess ($$$) { #{{{
+sub preprocess ($$$;$) { #{{{
my $page=shift; # the page the data comes from
my $destpage=shift; # the page the data will appear in (different for inline)
my $content=shift;
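+	# optional flag: set during the scan pass, when the page is being
+	# scanned for links and metadata rather than rendered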
+ my $scan=shift;
my $handle=sub {
my $escape=shift;
return "[[$command $params]]";
}
elsif (exists $hooks{preprocess}{$command}) {
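+			# when scanning, only run preprocess hooks that registered
+			# with scan => 1; other directives expand to nothing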
+ return "" if $scan && ! $hooks{preprocess}{$command}{scan};
# Note: preserve order of params, some plugins may
# consider it significant.
my @params;
return $content;
} #}}}
-sub filter ($$) {
+sub filter ($$) { #{{{
my $page=shift;
my $content=shift;
});
return $content;
-}
+} #}}}
sub indexlink () { #{{{
return "<a href=\"$config{url}\">$config{wikiname}</a>";
"ctime=$pagectime{$page} ".
"src=$pagesources{$page}";
$line.=" dest=$_" foreach @{$renderedfiles{$page}};
- $line.=" link=$_" foreach @{$links{$page}};
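+	# list each link only once, even if the page links to it repeatedly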
+ my %count;
+ $line.=" link=$_" foreach grep { ++$count{$_} == 1 } @{$links{$page}};
if (exists $depends{$page}) {
$line.=" depends=".encode_entities($depends{$page}, " \t\n");
}
if (! exists $param{type} || ! ref $param{call} || ! exists $param{id}) {
error "hook requires type, call, and id parameters";
}
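+	# Example registration (hypothetical plugin id):
+	#   hook(type => "preprocess", id => "myplugin", call => \&preprocess);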
+
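+	# no_override lets a caller register a hook only if no hook with the
+	# same type and id has already been registered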
+ return if $param{no_override} && exists $hooks{$param{type}}{$param{id}};
$hooks{$param{type}}{$param{id}}=\%param;
} # }}}
my $sub=shift;
if (exists $hooks{$type}) {
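+		# hooks registered with last => 1 are deferred and run after all
+		# the other hooks of this type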
+ my @deferred;
foreach my $id (keys %{$hooks{$type}}) {
+ if ($hooks{$type}{$id}{last}) {
+ push @deferred, $id;
+ next;
+ }
+ $sub->($hooks{$type}{$id}{call});
+ }
+ foreach my $id (@deferred) {
$sub->($hooks{$type}{$id}{call});
}
}