memoize('pagename');
memoize('bestlink');
-my ($srcdir)= shift =~ /(.*)/; # untaint
-my ($destdir)= shift =~ /(.*)/; # untaint
+sub usage {
+ die "usage: ikiwiki [options] source dest\n";
+}
+
my $link=qr/\[\[([^\s]+)\]\]/;
-my $verbose=1;
+my $verbose=0;
+my $rebuild=0;
my $wikiname="wiki";
+if (grep /^-/, @ARGV) {
+	eval q{use Getopt::Long};
+ GetOptions(
+ "wikiname=s" => \$wikiname,
+ "verbose|v" => \$verbose,
+ "rebuild" => \$rebuild,
+ ) || usage();
+}
+usage() unless @ARGV == 2;
+my ($srcdir) = shift =~ /(.*)/; # untaint
+my ($destdir) = shift =~ /(.*)/; # untaint
my %links;
+my %oldlinks;
my %oldpagemtime;
my %renderedfiles;
+my %pagesources;
sub error ($) {
die @_;
}
} while $cwd=~s!/?[^/]+$!!;
- print STDERR "warning: page $page, broken link: $link\n";
+ #print STDERR "warning: page $page, broken link: $link\n";
return "";
}
return $link if $page eq $bestlink;
+ # TODO BUG: %renderedfiles may not have it, if the linked to page
+ # was also added and isn't yet rendered! Note that this bug is
+ # masked by the bug mentioned below that makes all new files
+ # be rendered twice.
if (! grep { $_ eq $bestlink } values %renderedfiles) {
$bestlink=htmlpage($bestlink);
}
}
}
+sub linkbacks ($$) {
+ my $content=shift;
+ my $page=shift;
+
+ my @links;
+ foreach my $p (keys %links) {
+ next if bestlink($page, $p) eq $page;
+ if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
+ my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
+
+ # Trim common dir prefixes from both pages.
+ my $p_trimmed=$p;
+ my $page_trimmed=$page;
+ my $dir;
+ 1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
+ defined $dir &&
+ $p_trimmed=~s/^\Q$dir\E// &&
+ $page_trimmed=~s/^\Q$dir\E//;
+
+ push @links, "<a href=\"$href\">$p_trimmed</a>";
+ }
+ }
+
+ $content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
+ return $content;
+}
+
sub finalize ($$) {
my $content=shift;
my $page=shift;
my $path="";
foreach my $dir (reverse split("/", $page)) {
if (length($pagelink)) {
- $pagelink="<a href=\"$path$dir.html\">$dir/</a> $pagelink";
+ $pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
}
else {
$pagelink=$dir;
}
$path.="../";
}
- $path=~s/\.\.\///;
- $pagelink="<a href=\"$path\">$wikiname/</a> $pagelink";
+ $path=~s/\.\.\/$/index.html/;
+ $pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
$content="<html>\n<head><title>$title</title></head>\n<body>\n".
"<h1>$pagelink</h1>\n".
my $content=readpage($file);
if ($type ne 'unknown') {
my $page=pagename($file);
+
$links{$page}=[findlinks($content)];
-
+
$content=linkify($content, $file);
$content=htmlize($type, $content);
+ $content=linkbacks($content, $page);
$content=finalize($content, $page);
writepage(htmlpage($page), $content);
sub loadindex () {
open (IN, "$srcdir/.index") || return;
while (<IN>) {
+ ($_)=/(.*)/; # untaint
chomp;
- my ($mtime, $page, $rendered, @links)=split(' ', $_);
+ my ($mtime, $file, $rendered, @links)=split(' ', $_);
+ my $page=pagename($file);
+ $pagesources{$page}=$file;
$oldpagemtime{$page}=$mtime;
- $links{$page}=\@links;
- ($renderedfiles{$page})=$rendered=~m/(.*)/; # untaint
+ $oldlinks{$page}=[@links];
+ $links{$page}=[@links];
+ $renderedfiles{$page}=$rendered;
}
close IN;
}
sub saveindex () {
open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
foreach my $page (keys %oldpagemtime) {
- print OUT "$oldpagemtime{$page} $page $renderedfiles{$page} ".
+ print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
join(" ", @{$links{$page}})."\n"
if $oldpagemtime{$page};
}
},
}, $srcdir);
+ my %rendered;
+
# check for added or removed pages
- my @adddel;
+ my @add;
foreach my $file (@files) {
my $page=pagename($file);
if (! $oldpagemtime{$page}) {
debug("new page $page");
- push @adddel, $page;
+ push @add, $file;
$links{$page}=[];
+ $pagesources{$page}=$file;
}
}
+ my @del;
foreach my $page (keys %oldpagemtime) {
if (! $exists{$page}) {
debug("removing old page $page");
+ push @del, $renderedfiles{$page};
prune($destdir."/".$renderedfiles{$page});
delete $renderedfiles{$page};
$oldpagemtime{$page}=0;
- push @adddel, $page;
+ delete $pagesources{$page};
}
}
mtime("$srcdir/$file") > $oldpagemtime{$page}) {
debug("rendering changed file $file");
render($file);
+ $rendered{$file}=1;
}
}
# if any files were added or removed, check to see if each page
# needs an update due to linking to them
- if (@adddel) {
+ # TODO: inefficient; pages may get rendered above and again here;
+ # problem is the bestlink may have changed and we won't know until
+ # now
+ if (@add || @del) {
FILE: foreach my $file (@files) {
my $page=pagename($file);
- foreach my $p (@adddel) {
+ foreach my $f (@add, @del) {
+ my $p=pagename($f);
foreach my $link (@{$links{$page}}) {
if (bestlink($page, $link) eq $p) {
debug("rendering $file, which links to $p");
render($file);
+ $rendered{$file}=1;
next FILE;
}
}
}
}
}
+
+ # handle linkbacks; if a page has added/removed links, update the
+ # pages it links to
+ # TODO: inefficient; pages may get rendered above and again here;
+ # problem is the linkbacks could be wrong in the first pass render
+ # above
+ if (%rendered) {
+ my %linkchanged;
+ foreach my $file (keys %rendered, @del) {
+ my $page=pagename($file);
+ if (exists $links{$page}) {
+ foreach my $link (@{$links{$page}}) {
+ $link=bestlink($page, $link);
+				if (length $link &&
+				    (! exists $oldlinks{$page} ||
+				     ! grep { $_ eq $link } @{$oldlinks{$page}})) {
+ $linkchanged{$link}=1;
+ }
+ }
+ }
+ if (exists $oldlinks{$page}) {
+ foreach my $link (@{$oldlinks{$page}}) {
+ $link=bestlink($page, $link);
+				if (length $link &&
+				    (! exists $links{$page} ||
+				     ! grep { $_ eq $link } @{$links{$page}})) {
+ $linkchanged{$link}=1;
+ }
+ }
+ }
+ }
+ foreach my $link (keys %linkchanged) {
+ my $linkfile=$pagesources{$link};
+ if (defined $linkfile) {
+ debug("rendering $linkfile, to update its linkbacks");
+ render($linkfile);
+ }
+ }
+ }
}
-loadindex();
+loadindex() unless $rebuild;
refresh();
saveindex();