X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/2877f698959f410889f358409eff019be0863331..d5566303d6b416fb4b0f49a4a7eae2c81bddf17e:/ikiwiki

diff --git a/ikiwiki b/ikiwiki
index c2d5e6475..cb8295cf9 100755
--- a/ikiwiki
+++ b/ikiwiki
@@ -6,23 +6,22 @@ use File::Find;
 use Memoize;
 use File::Spec;
 
+$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
+
 BEGIN {
 	$blosxom::version="is a proper perl module too much to ask?";
 	do "/usr/bin/markdown";
 }
 
-memoize('pagename');
-memoize('bestlink');
-
-my ($srcdir)= shift =~ /(.*)/; # untaint
-my ($destdir)= shift =~ /(.*)/; # untaint
+my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
+    %pagesources);
 my $link=qr/\[\[([^\s]+)\]\]/;
-my $verbose=1;
+my $verbose=0;
 my $wikiname="wiki";
-my %links;
-my %oldpagemtime;
-my %renderedfiles;
+
+sub usage {
+	die "usage: ikiwiki [options] source dest\n";
+}
 
 sub error ($) {
 	die @_;
@@ -138,7 +137,7 @@ sub bestlink ($$) {
 		}
 	} while $cwd=~s!/?[^/]+$!!;
 
-	print STDERR "warning: page $page, broken link: $link\n";
+	#print STDERR "warning: page $page, broken link: $link\n";
 	return "";
 }
 
@@ -156,6 +155,10 @@ sub htmllink ($$) {
 
 	return $link if $page eq $bestlink;
 
+	# TODO BUG: %renderedfiles may not have it, if the linked to page
+	# was also added and isn't yet rendered! Note that this bug is
+	# masked by the bug mentioned below that makes all new files
+	# be rendered twice.
 	if (! grep { $_ eq $bestlink } values %renderedfiles) {
 		$bestlink=htmlpage($bestlink);
 	}
@@ -192,6 +195,33 @@ sub htmlize ($$) {
 	}
 }
 
+sub linkbacks ($$) {
+	my $content=shift;
+	my $page=shift;
+
+	my @links;
+	foreach my $p (keys %links) {
+		next if bestlink($page, $p) eq $page;
+		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
+			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
+
+			# Trim common dir prefixes from both pages.
+			my $p_trimmed=$p;
+			my $page_trimmed=$page;
+			my $dir;
+			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
+			        defined $dir &&
+			        $p_trimmed=~s/^\Q$dir\E// &&
+			        $page_trimmed=~s/^\Q$dir\E//;
+
+			push @links, "<a href=\"$href\">$p_trimmed</a>";
+		}
+	}
+
+	$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
+	return $content;
+}
+
 sub finalize ($$) {
 	my $content=shift;
 	my $page=shift;
@@ -203,7 +233,7 @@ sub finalize ($$) {
 	my $path="";
 	foreach my $dir (reverse split("/", $page)) {
 		if (length($pagelink)) {
-			$pagelink="$dir/ $pagelink";
+			$pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
 		}
 		else {
 			$pagelink=$dir;
@@ -211,7 +241,7 @@ sub finalize ($$) {
 		$path.="../";
 	}
 	$path=~s/\.\.\/$/index.html/;
-	$pagelink="$wikiname/ $pagelink";
+	$pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
 
 	$content="<html>\n<head><title>$title</title></head>\n<body>\n".
 	         "<h1>$pagelink</h1>\n".
@@ -228,10 +258,12 @@ sub render ($) {
 	my $content=readpage($file);
 	if ($type ne 'unknown') {
 		my $page=pagename($file);
+		$links{$page}=[findlinks($content)];
-
+
 		$content=linkify($content, $file);
 		$content=htmlize($type, $content);
+		$content=linkbacks($content, $page);
 		$content=finalize($content, $page);
 		writepage(htmlpage($page), $content);
 
@@ -249,11 +281,15 @@ sub render ($) {
 sub loadindex () {
 	open (IN, "$srcdir/.index") || return;
 	while (<IN>) {
+		($_)=/(.*)/; # untaint
 		chomp;
-		my ($mtime, $page, $rendered, @links)=split(' ', $_);
+		my ($mtime, $file, $rendered, @links)=split(' ', $_);
+		my $page=pagename($file);
+		$pagesources{$page}=$file;
 		$oldpagemtime{$page}=$mtime;
-		$links{$page}=\@links;
-		($renderedfiles{$page})=$rendered=~m/(.*)/; # untaint
+		$oldlinks{$page}=[@links];
+		$links{$page}=[@links];
+		$renderedfiles{$page}=$rendered;
 	}
 	close IN;
 }
@@ -261,7 +297,7 @@ sub loadindex () {
 sub saveindex () {
 	open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
 	foreach my $page (keys %oldpagemtime) {
-		print OUT "$oldpagemtime{$page} $page $renderedfiles{$page} ".
+		print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
 			join(" ", @{$links{$page}})."\n"
 				if $oldpagemtime{$page};
 	}
@@ -302,23 +338,28 @@ sub refresh () {
 		},
 	}, $srcdir);
 
+	my %rendered;
+
 	# check for added or removed pages
-	my @adddel;
+	my @add;
 	foreach my $file (@files) {
 		my $page=pagename($file);
 		if (! $oldpagemtime{$page}) {
 			debug("new page $page");
-			push @adddel, $page;
+			push @add, $file;
 			$links{$page}=[];
+			$pagesources{$page}=$file;
 		}
 	}
+	my @del;
 	foreach my $page (keys %oldpagemtime) {
 		if (! $exists{$page}) {
 			debug("removing old page $page");
+			push @del, $renderedfiles{$page};
 			prune($destdir."/".$renderedfiles{$page});
 			delete $renderedfiles{$page};
 			$oldpagemtime{$page}=0;
-			push @adddel, $page;
+			delete $pagesources{$page};
 		}
 	}
 
@@ -330,27 +371,142 @@ sub refresh () {
 		    mtime("$srcdir/$file") > $oldpagemtime{$page}) {
 			debug("rendering changed file $file");
 			render($file);
+			$rendered{$file}=1;
 		}
 	}
 
 	# if any files were added or removed, check to see if each page
 	# needs an update due to linking to them
-	if (@adddel) {
+	# TODO: inefficient; pages may get rendered above and again here;
+	# problem is the bestlink may have changed and we won't know until
+	# now
+	if (@add || @del) {
 FILE:		foreach my $file (@files) {
 			my $page=pagename($file);
-			foreach my $p (@adddel) {
+			foreach my $f (@add, @del) {
+				my $p=pagename($f);
 				foreach my $link (@{$links{$page}}) {
 					if (bestlink($page, $link) eq $p) {
 						debug("rendering $file, which links to $p");
 						render($file);
+						$rendered{$file}=1;
 						next FILE;
 					}
 				}
 			}
 		}
 	}
+
+	# handle linkbacks; if a page has added/removed links, update the
+	# pages it links to
+	# TODO: inefficient; pages may get rendered above and again here;
+	# problem is the linkbacks could be wrong in the first pass render
+	# above
+	if (%rendered) {
+		my %linkchanged;
+		foreach my $file (keys %rendered, @del) {
+			my $page=pagename($file);
+			if (exists $links{$page}) {
+				foreach my $link (@{$links{$page}}) {
+					$link=bestlink($page, $link);
+					if (length $link &&
+					    ! exists $oldlinks{$page} ||
+					    ! grep { $_ eq $link } @{$oldlinks{$page}}) {
+						$linkchanged{$link}=1;
+					}
+				}
+			}
+			if (exists $oldlinks{$page}) {
+				foreach my $link (@{$oldlinks{$page}}) {
+					$link=bestlink($page, $link);
+					if (length $link &&
+					    ! exists $links{$page} ||
+					    ! grep { $_ eq $link } @{$links{$page}}) {
+						$linkchanged{$link}=1;
+					}
+				}
+			}
+		}
+		foreach my $link (keys %linkchanged) {
+			my $linkfile=$pagesources{$link};
+			if (defined $linkfile) {
+				debug("rendering $linkfile, to update its linkbacks");
+				render($linkfile);
+			}
+		}
+	}
+}
+
+# Generates a C wrapper program for running ikiwiki in a specific way.
+# The wrapper may be safely made suid.
+sub gen_wrapper ($$) {
+	my ($offline, $rebuild)=@_;
+
+	eval {use Cwd 'abs_path'};
+	$srcdir=abs_path($srcdir);
+	$destdir=abs_path($destdir);
+	my $this=abs_path($0);
+	if (! -x $this) {
+		error("$this doesn't seem to be executable");
+	}
+
+	my $call=qq{"$this", "$this", "$srcdir", "$destdir", "--wikiname=$wikiname"};
+	$call.=', "--verbose"' if $verbose;
+	$call.=', "--rebuild"' if $rebuild;
+	$call.=', "--offline"' if $offline;
+
+	open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");;
+	print OUT <<"EOF";
+/* A suid wraper for ikiwiki */
+#include <stdio.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+int main (void) {
+	clearenv();
+	execl($call, NULL);
+	perror("failed to run $this");
+	exit(1);
+}
+EOF
+	close OUT;
+	if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
+		error("failed to compile ikiwiki-wrap.c");
+	}
+	unlink("ikiwiki-wrap.c");
+	print "successfully generated ikiwiki-wrap\n";
+	exit 0;
+}
+
+sub update () {
+	if (-d "$srcdir/.svn") {
+		if (system("svn", "update", "--quiet", $srcdir) != 0) {
+			warn("svn update failed\n");
+		}
+	}
+}
+
+my $rebuild=0;
+my $offline=0;
+my $gen_wrapper=0;
+if (grep /^-/, @ARGV) {
+	eval {use Getopt::Long};
+	GetOptions(
+		"wikiname=s" => \$wikiname,
+		"verbose|v" => \$verbose,
+		"rebuild" => \$rebuild,
+		"gen-wrapper" => \$gen_wrapper,
+		"offline" => \$offline,
+	) || usage();
 }
+usage() unless @ARGV == 2;
+($srcdir) = shift =~ /(.*)/; # untaint
+($destdir) = shift =~ /(.*)/; # untaint
 
-loadindex();
+gen_wrapper($offline, $rebuild) if $gen_wrapper;
+memoize('pagename');
+memoize('bestlink');
+update() unless $offline;
+loadindex() unless $rebuild;
 refresh();
 saveindex();
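
A standalone sketch of the common-prefix trimming used in linkbacks() above; the page names are made up for illustration:

	#!/usr/bin/perl
	# Strip shared leading "dir/" components from both page names, so the
	# linkback text shown on a page is relative to their common directory.
	use warnings;
	use strict;

	my $p="sandbox/subpage";	# the page that links back
	my $page="sandbox/otherpage";	# the page being rendered

	my $p_trimmed=$p;
	my $page_trimmed=$page;
	my $dir;
	1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
	        defined $dir &&
	        $p_trimmed=~s/^\Q$dir\E// &&
	        $page_trimmed=~s/^\Q$dir\E//;

	print "$p_trimmed\n";	# prints "subpage", not "sandbox/subpage"

Each pass of the `1 while` loop peels one shared directory component off both names and stops as soon as they no longer match, which is why deeply nested pages still get short linkback labels.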
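
The %linkchanged pass in refresh() amounts to a symmetric difference of each page's old and new link lists. A minimal sketch of that test, assuming two hand-written lists and skipping the bestlink() resolution the real code applies first:

	#!/usr/bin/perl
	# A destination page needs its linkbacks re-rendered when a link to
	# it was either added or removed since the last run.
	use warnings;
	use strict;

	my @oldlinks=qw(index todo);	# links the page had on the last run
	my @links=qw(index ideas);	# links it has after this render

	my %linkchanged;
	foreach my $link (@links) {
		$linkchanged{$link}=1 unless grep { $_ eq $link } @oldlinks;
	}
	foreach my $link (@oldlinks) {
		$linkchanged{$link}=1 unless grep { $_ eq $link } @links;
	}

	print "re-render linkbacks on: $_\n" foreach sort keys %linkchanged;
	# prints "ideas" (added) and "todo" (removed)

This is why loadindex() now keeps %oldlinks alongside %links: without the previous run's lists there would be nothing to compare against.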
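
The .index format also changes with this commit: the second field is now the source file name rather than the page name, which is what lets loadindex() repopulate %pagesources. A line written by saveindex() has the shape "mtime source-file rendered-file links...", for example (values invented for illustration):

	1142000000 todo/ideas.mdwn todo/ideas.html index todo

An old-format index would have its second field reinterpreted as a file name on the next run; note that --rebuild skips loadindex() entirely, which sidesteps the mismatch.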
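
For reference, the command-line surface after this change, per usage() and the GetOptions() call above (the wiki name and paths are placeholders):

	ikiwiki --verbose --wikiname=MyWiki ~/wiki-src ~/public_html/wiki
	ikiwiki --gen-wrapper ~/wiki-src ~/public_html/wiki

The second form writes and compiles ikiwiki-wrap, a small C program that clears the environment and re-executes ikiwiki with the baked-in arguments, which is what makes it safe to install suid, as the comment above gen_wrapper() notes.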