backlinks => $backlinks,
more_backlinks => $more_backlinks,
mtime => displaytime($pagemtime{$page}),
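+		# also expose the page's creation time to templates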
+ ctime => displaytime($pagectime{$page}),
baseurl => baseurl($page),
);
return $content;
} #}}}
-sub mtime ($) { #{{{
- my $file=shift;
-
- return (stat($file))[9];
-} #}}}
-
sub scan ($) { #{{{
my $file=shift;
# Always needs to be done, since filters might add links
# to the content.
$content=filter($page, $page, $content);
-
- my @links;
- while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
- push @links, linkpage($2);
- }
+
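+	# (re)initialize this page's link list; scan hooks and
+	# preprocessing can add more links below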
if ($config{discussion}) {
# Discussion links are a special case since they're
# not in the text of the page, but on its template.
- push @links, $page."/".gettext("discussion");
+ $links{$page}=[ $page."/".gettext("discussion") ];
}
- $links{$page}=\@links;
-
+ else {
+ $links{$page}=[];
+ }
+
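+	# let plugins gather links and metadata during the scan pass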
+ run_hooks(scan => sub {
+ shift->(
+ page => $page,
+ content => $content,
+ );
+ });
+
# Preprocess in scan-only mode.
preprocess($page, $page, $content, 1);
}
writefile($output, $config{destdir}, genpage($page, $content));
}
else {
- my $srcfd=readfile($srcfile, 1, 1);
delete $depends{$file};
will_render($file, $file, 1);
+
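+	# optionally hardlink raw files into destdir instead of copying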
+ if ($config{hardlink}) {
+ prep_writefile($file, $config{destdir});
+ unlink($config{destdir}."/".$file);
+ if (link($srcfile, $config{destdir}."/".$file)) {
+ return;
+ }
+		# if the hardlink fails, fall through to the regular copy below
+ }
+
+ my $srcfd=readfile($srcfile, 1, 1);
writefile($file, $config{destdir}, undef, 1, sub {
my $destfd=shift;
my $cleanup=shift;
if (file_pruned($_, $config{srcdir})) {
$File::Find::prune=1;
}
- elsif (! -d $_ && ! -l $_) {
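+			# -l does an lstat and fills the _ buffer; -d _ reuses
+			# it, so the pair costs a single syscall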
+ elsif (! -l $_ && ! -d _) {
my ($f)=/$config{wiki_file_regexp}/; # untaint
if (! defined $f) {
warn(sprintf(gettext("skipping bad filename %s"), $_)."\n");
if (file_pruned($_, $dir)) {
$File::Find::prune=1;
}
- elsif (! -d $_ && ! -l $_) {
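+			# same stat-reuse trick as in the srcdir scan above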
+ elsif (! -l $_ && ! -d _) {
my ($f)=/$config{wiki_file_regexp}/; # untaint
if (! defined $f) {
warn(sprintf(gettext("skipping bad filename %s"), $_)."\n");
# avoid underlaydir
# override attacks; see
# security.mdwn
- if (! -e "$config{srcdir}/$f" &&
- ! -l "$config{srcdir}/$f") {
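+					# reordered so -e _ can reuse the lstat done by -l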
+ if (! -l "$config{srcdir}/$f" &&
+ ! -e _) {
my $page=pagename($f);
if (! $exists{$page}) {
push @files, $f;
};
my (%rendered, @add, @del, @internal);
-
# check for added or removed pages
foreach my $file (@files) {
my $page=pagename($file);
}
else {
push @add, $file;
+ if ($config{getctime} && -e "$config{srcdir}/$file") {
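+				# trap rcs_getctime errors so one bad file
+				# doesn't abort the whole refresh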
+ eval {
+ my $time=rcs_getctime("$config{srcdir}/$file");
+ $pagectime{$page}=$time;
+ };
+ if ($@) {
+ print STDERR $@;
+ }
+ }
}
$pagecase{lc $page}=$page;
- if ($config{getctime} && -e "$config{srcdir}/$file") {
- $pagectime{$page}=rcs_getctime("$config{srcdir}/$file");
- }
- elsif (! exists $pagectime{$page}) {
- $pagectime{$page}=mtime(srcfile($file));
+ if (! exists $pagectime{$page}) {
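+			# fall back to mtime; srcfile_stat returns the filename
+			# followed by stat's fields, so index 10 is mtime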
+ $pagectime{$page}=(srcfile_stat($file))[10];
}
}
}
my @needsbuild;
foreach my $file (@files) {
my $page=pagename($file);
-
- my $mtime=mtime(srcfile($file));
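+		# stat the source file once; $stat[9] is its mtime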
+ my ($srcfile, @stat)=srcfile_stat($file);
if (! exists $pagemtime{$page} ||
- $mtime > $pagemtime{$page} ||
+ $stat[9] > $pagemtime{$page} ||
$forcerebuild{$page}) {
- $pagemtime{$page}=$mtime;
+ $pagemtime{$page}=$stat[9];
if (isinternal($page)) {
push @internal, $file;
# Preprocess internal page in scan-only mode.
- preprocess($page, $page, readfile(srcfile($file)), 1);
+ preprocess($page, $page, readfile($srcfile), 1);
}
else {
push @needsbuild, $file;
} #}}}
sub commandline_render () { #{{{
- loadplugins();
- checkconfig();
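+	# (assumes the caller now runs loadplugins and checkconfig
+	# before rendering)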
lockwiki();
loadindex();
unlockwiki();
$content=preprocess($page, $page, $content);
$content=linkify($page, $page, $content);
$content=htmlize($page, $type, $content);
- $pagemtime{$page}=mtime($srcfile);
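+	# inline the removed mtime() helper; field 9 of stat is mtime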
+ $pagemtime{$page}=(stat($srcfile))[9];
print genpage($page, $content);
exit 0;