use Memoize;
use File::Spec;
+$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
+
BEGIN {
+	# /usr/bin/markdown is a blosxom plugin; fake the version global
+	# it checks for so it can be loaded outside of blosxom.
$blosxom::version="is a proper perl module too much to ask?";
do "/usr/bin/markdown";
}
-memoize('pagename');
-memoize('bestlink');
-
-sub usage {
- die "usage: ikiwiki [options] source dest\n";
-}
-
+# Wiki state. $srcdir/$destdir are set from the command line below; the
+# hashes persist page metadata across runs: page -> links found, links
+# found last run, mtime at last render, output file, and source file.
+my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
+	%pagesources);
my $link=qr/\[\[([^\s]+)\]\]/;
my $verbose=0;
-my $rebuild=0;
my $wikiname="wiki";
-if (grep /^-/, @ARGV) {
- eval {use Getopt::Long};
- GetOptions(
- "wikiname=s" => \$wikiname,
- "verbose|v" => \$verbose,
- "rebuild" => \$rebuild,
- ) || usage();
-}
-usage() unless @ARGV == 2;
-my ($srcdir) = shift =~ /(.*)/; # untaint
-my ($destdir) = shift =~ /(.*)/; # untaint
-my %links;
-my %oldlinks;
-my %oldpagemtime;
-my %renderedfiles;
-my %pagesources;
+# Print a usage message and exit.
+sub usage {
+	die "usage: ikiwiki [options] source dest\n";
+}
+# Die with the given error message(s).
sub error ($) {
	die @_;
}
} while $cwd=~s!/?[^/]+$!!;
- print STDERR "warning: page $page, broken link: $link\n";
+ #print STDERR "warning: page $page, broken link: $link\n";
return "";
}
return $link if $page eq $bestlink;
+ # TODO BUG: %renderedfiles may not have it, if the linked to page
+ # was also added and isn't yet rendered! Note that this bug is
+ # masked by the bug mentioned below that makes all new files
+ # be rendered twice.
if (! grep { $_ eq $bestlink } values %renderedfiles) {
$bestlink=htmlpage($bestlink);
}
my @links;
foreach my $p (keys %links) {
- if (grep { $_ eq $page } @{$links{$p}}) {
+ next if bestlink($page, $p) eq $page;
+ if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
- push @links, "<a href=\"$href\">$p</a>";
+
+ # Trim common dir prefixes from both pages.
+ my $p_trimmed=$p;
+ my $page_trimmed=$page;
+ my $dir;
+ 1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
+ defined $dir &&
+ $p_trimmed=~s/^\Q$dir\E// &&
+ $page_trimmed=~s/^\Q$dir\E//;
+
+ push @links, "<a href=\"$href\">$p_trimmed</a>";
}
}
my $page=pagename($file);
$pagesources{$page}=$file;
$oldpagemtime{$page}=$mtime;
- $links{$page}=[@links];
$oldlinks{$page}=[@links];
+ $links{$page}=[@links];
$renderedfiles{$page}=$rendered;
}
close IN;
prune($destdir."/".$renderedfiles{$page});
delete $renderedfiles{$page};
$oldpagemtime{$page}=0;
+ delete $pagesources{$page};
}
}
render($file);
$rendered{$file}=1;
}
- elsif ($rebuild) {
- debug("rebuilding unchanged file $file");
- render($file);
- $rendered{$file}=1;
- }
}
# if any files were added or removed, check to see if each page
# needs an update due to linking to them
+ # TODO: inefficient; pages may get rendered above and again here;
+ # problem is the bestlink may have changed and we won't know until
+ # now
if (@add || @del) {
FILE: foreach my $file (@files) {
- next if $rendered{$file};
my $page=pagename($file);
foreach my $f (@add, @del) {
my $p=pagename($f);
if (bestlink($page, $link) eq $p) {
debug("rendering $file, which links to $p");
render($file);
+ $rendered{$file}=1;
next FILE;
}
}
# pages it links to
# TODO: inefficient; pages may get rendered above and again here;
# problem is the linkbacks could be wrong in the first pass render
- # above.
+ # above
if (%rendered) {
my %linkchanged;
foreach my $file (keys %rendered, @del) {
- my $pagename=pagename($file);
- if (exists $links{$pagename}) {
- foreach my $link (@{$links{$pagename}}) {
- if (! exists $oldlinks{$pagename} ||
- ! grep { $_ eq $link } @{$oldlinks{$pagename}}) {
+ my $page=pagename($file);
+ if (exists $links{$page}) {
+ foreach my $link (@{$links{$page}}) {
+ $link=bestlink($page, $link);
+ if (length $link &&
+ ! exists $oldlinks{$page} ||
+ ! grep { $_ eq $link } @{$oldlinks{$page}}) {
$linkchanged{$link}=1;
}
}
}
- if (exists $oldlinks{$pagename}) {
- foreach my $link (@{$oldlinks{$pagename}}) {
- if (! exists $links{$pagename} ||
- ! grep { $_ eq $link } @{$links{$pagename}}) {
+ if (exists $oldlinks{$page}) {
+ foreach my $link (@{$oldlinks{$page}}) {
+ $link=bestlink($page, $link);
+ if (length $link &&
+ ! exists $links{$page} ||
+ ! grep { $_ eq $link } @{$links{$page}}) {
$linkchanged{$link}=1;
}
}
}
}
-loadindex();
+# Generates a C wrapper program for running ikiwiki in a specific way.
+# The wrapper may be safely made suid: it clears the environment and
+# execs ikiwiki with a fixed, baked-in set of arguments.
+sub gen_wrapper ($$) {
+	my ($offline, $rebuild)=@_;
+
+	# String eval so the module load is deferred until this sub runs;
+	# eval {use ...} would still load Cwd at compile time.
+	eval q{use Cwd 'abs_path'};
+	$srcdir=abs_path($srcdir);
+	$destdir=abs_path($destdir);
+	my $this=abs_path($0);
+	if (! -x $this) {
+		error("$this doesn't seem to be executable");
+	}
+
+	# Build the execl() argument list: program path, argv[0], then the
+	# frozen command-line options the wrapper will always pass.
+	my $call=qq{"$this", "$this", "$srcdir", "$destdir", "--wikiname=$wikiname"};
+	$call.=', "--verbose"' if $verbose;
+	$call.=', "--rebuild"' if $rebuild;
+	$call.=', "--offline"' if $offline;
+
+	open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");
+	print OUT <<"EOF";
+/* A suid wraper for ikiwiki */
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+
+int main (void) {
+	clearenv();
+	/* The list terminator must be a null *pointer*; a bare NULL may
+	 * be an int 0, which is undefined behavior in varargs. */
+	execl($call, (char *) NULL);
+	perror("failed to run $this");
+	exit(1);
+}
+EOF
+	close OUT;
+	if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
+		error("failed to compile ikiwiki-wrap.c");
+	}
+	unlink("ikiwiki-wrap.c");
+	print "successfully generated ikiwiki-wrap\n";
+	exit 0;
+}
+
+# Update the source directory from subversion, if it is a svn checkout.
+# Failure is non-fatal; we just warn and carry on with the stale tree.
+sub update () {
+	if (-d "$srcdir/.svn") {
+		if (system("svn", "update", "--quiet", $srcdir) != 0) {
+			warn("svn update failed\n");
+		}
+	}
+}
+
+# Command line processing and main program flow.
+my $rebuild=0;
+my $offline=0;
+my $gen_wrapper=0;
+if (grep /^-/, @ARGV) {
+	# String eval defers loading Getopt::Long until options are
+	# actually present; eval {use ...} loads it at compile time
+	# unconditionally, defeating the point of the guard.
+	eval q{use Getopt::Long};
+	GetOptions(
+		"wikiname=s" => \$wikiname,
+		"verbose|v" => \$verbose,
+		"rebuild" => \$rebuild,
+		"gen-wrapper" => \$gen_wrapper,
+		"offline" => \$offline,
+	) || usage();
+}
+usage() unless @ARGV == 2;
+($srcdir) = shift =~ /(.*)/; # untaint
+($destdir) = shift =~ /(.*)/; # untaint
+
+# gen_wrapper exits; everything after only runs in normal operation.
+gen_wrapper($offline, $rebuild) if $gen_wrapper;
+memoize('pagename');
+memoize('bestlink');
+update() unless $offline;
+loadindex() unless $rebuild;
refresh();
saveindex();