use Memoize;
use File::Spec;
+$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
+
BEGIN {
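+# Markdown.pl expects to be loaded as a blosxom plugin, so fake up just
+# enough of blosxom to keep it happy.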
$blosxom::version="is a proper perl module too much to ask?";
do "/usr/bin/markdown";
}
-memoize('pagename');
-memoize('bestlink');
-
-my ($srcdir)= shift =~ /(.*)/; # untaint
-my ($destdir)= shift =~ /(.*)/; # untaint
-my $link=qr/\[\[([^\s]+)\]\]/;
-my $verbose=1;
+my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
+ %pagesources);
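+# Matches a [[wiki link]] and captures the page name.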
+my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
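+# Characters allowed in source file names; the anchored capture
+# doubles as an untaint.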
+my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
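+# Files to skip entirely: .svn directories, path traversal, dotfiles,
+# and generated html.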
+my $wiki_file_prune_regexp=qr!((^|/).svn/|\.\.|^\.|\/\.|\.html?$)!;
+my $verbose=0;
my $wikiname="wiki";
-
-my %links;
-my %oldpagemtime;
-my %renderedfiles;
+my $default_pagetype=".mdwn";
+my $cgi=0;
+my $url="";
+my $cgiurl="";
+my $svn=1;
+
+sub usage {
+ die "usage: ikiwiki [options] source dest\n";
+}
sub error ($) {
- die @_;
+ if ($cgi) {
+ print "Content-type: text/html\n\n";
+ print "Error: @_\n";
+ exit 1;
+ }
+ else {
+ die @_;
+ }
}
sub debug ($) {
print "@_\n" if $verbose;
}
sub mtime ($) {
my $page=shift;
return (stat($page))[9];
}
+sub possibly_foolish_untaint ($) {
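+ # No validation at all; only call this on input that is trusted or
+ # has been checked by other means.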
+ my $tainted=shift;
+ my ($untainted)=$tainted=~/(.*)/;
+ return $untainted;
+}
+
sub basename {
my $file=shift;
$file=~s!.*/!!;
return $file;
}
sub htmlpage ($) {
my $page=shift;
return $page.".html";
}
-sub readpage ($) {
- my $page=shift;
+sub readfile ($) {
+ my $file=shift;
local $/=undef;
- open (PAGE, "$srcdir/$page") || error("failed to read $page: $!");
- my $ret=<PAGE>;
- close PAGE;
+ open (IN, "$file") || error("failed to read $file: $!");
+ my $ret=<IN>;
+ close IN;
return $ret;
}
-sub writepage ($$) {
- my $page=shift;
+sub writefile ($$) {
+ my $file=shift;
my $content=shift;
- my $dir=dirname("$destdir/$page");
+ my $dir=dirname($file);
if (! -d $dir) {
my $d="";
foreach my $s (split(m!/+!, $dir)) {
$d.="$s/";
if (! -d $d) {
mkdir($d) || error("failed to create directory $d: $!");
}
}
}
- open (PAGE, ">$destdir/$page") || error("failed to write $page: $!");
- print PAGE $content;
- close PAGE;
+ open (OUT, ">$file") || error("failed to write $file: $!");
+ print OUT $content;
+ close OUT;
}
sub findlinks {
my $content=shift;
my @links;
- while ($content =~ /$link/g) {
+ while ($content =~ /$wiki_link_regexp/g) {
push @links, lc($1);
}
return @links;
}
} while $cwd=~s!/?[^/]+$!!;
- print STDERR "warning: page $page, broken link: $link\n";
+ #print STDERR "warning: page $page, broken link: $link\n";
return "";
}
sub htmllink ($$) {
my $page=shift;
my $link=shift;
my $bestlink=bestlink($page, $link);
return $link if $page eq $bestlink;
+ # TODO BUG: %renderedfiles may not have it, if the linked-to page
+ # was also added and isn't yet rendered! Note that this bug is
+ # masked by the bug mentioned below that makes all new files
+ # be rendered twice.
if (! grep { $_ eq $bestlink } values %renderedfiles) {
$bestlink=htmlpage($bestlink);
}
if (! grep { $_ eq $bestlink } values %renderedfiles) {
- return "<a href=\"?\">?</a>$link"
+ return "<a href=\"$cgiurl?do=create&page=$link&from=$page\">?</a>$link"
}
$bestlink=File::Spec->abs2rel($bestlink, dirname($page));
return "<a href=\"$bestlink\">$link</a>";
}
sub linkify ($$) {
my $content=shift;
my $file=shift;
- $content =~ s/$link/htmllink(pagename($file), $1)/eg;
+ $content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;
return $content;
}
}
}
+sub linkbacks ($$) {
+ my $content=shift;
+ my $page=shift;
+
+ my @links;
+ foreach my $p (keys %links) {
+ next if bestlink($page, $p) eq $page;
+ if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
+ my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
+
+ # Trim common dir prefixes from both pages.
+ my $p_trimmed=$p;
+ my $page_trimmed=$page;
+ my $dir;
+ 1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
+ defined $dir &&
+ $p_trimmed=~s/^\Q$dir\E// &&
+ $page_trimmed=~s/^\Q$dir\E//;
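+ # E.g. a linkback from "foo/baz" to page "foo/bar" is shown
+ # as just "baz".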
+
+ push @links, "<a href=\"$href\">$p_trimmed</a>";
+ }
+ }
+
+ $content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
+ return $content;
+}
+
sub finalize ($$) {
my $content=shift;
my $page=shift;
my $title=basename($page);
$title=~s/_/ /g;
my $pagelink="";
my $path="";
foreach my $dir (reverse split("/", $page)) {
if (length($pagelink)) {
- $pagelink="<a href=\"$path$dir.html\">$dir/</a> $pagelink";
+ $pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
}
else {
$pagelink=$dir;
}
$path.="../";
}
- $path=~s/\.\.\///;
- $pagelink="<a href=\"$path\">$wikiname/</a> $pagelink";
+ $path=~s/\.\.\/$/index.html/;
+ $pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
+
+ my @actions;
+ if (length $cgiurl) {
+ push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
+ push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
+ }
$content="<html>\n<head><title>$title</title></head>\n<body>\n".
"<h1>$pagelink</h1>\n".
+ "@actions\n<hr>\n".
$content.
"</body>\n</html>\n";
return $content;
}
sub render ($) {
my $file=shift;
my $type=pagetype($file);
- my $content=readpage($file);
+ my $content=readfile("$srcdir/$file");
if ($type ne 'unknown') {
my $page=pagename($file);
+
$links{$page}=[findlinks($content)];
-
+
$content=linkify($content, $file);
$content=htmlize($type, $content);
+ $content=linkbacks($content, $page);
$content=finalize($content, $page);
- writepage(htmlpage($page), $content);
+ writefile("$destdir/".htmlpage($page), $content);
$oldpagemtime{$page}=time;
$renderedfiles{$page}=htmlpage($page);
}
else {
$links{$file}=[];
- writepage($file, $content);
+ writefile("$destdir/$file", $content);
$oldpagemtime{$file}=time;
$renderedfiles{$file}=$file;
}
sub loadindex () {
open (IN, "$srcdir/.index") || return;
while (<IN>) {
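+ # Each index line holds: mtime, source file, rendered file,
+ # then the page's links.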
+ $_=possibly_foolish_untaint($_);
chomp;
- my ($mtime, $page, $rendered, @links)=split(' ', $_);
+ my ($mtime, $file, $rendered, @links)=split(' ', $_);
+ my $page=pagename($file);
+ $pagesources{$page}=$file;
$oldpagemtime{$page}=$mtime;
- $links{$page}=\@links;
- ($renderedfiles{$page})=$rendered=~m/(.*)/; # untaint
+ $oldlinks{$page}=[@links];
+ $links{$page}=[@links];
+ $renderedfiles{$page}=$rendered;
}
close IN;
}
sub saveindex () {
open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
foreach my $page (keys %oldpagemtime) {
- print OUT "$oldpagemtime{$page} $page $renderedfiles{$page} ".
+ print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
join(" ", @{$links{$page}})."\n"
if $oldpagemtime{$page};
}
close OUT;
}
+sub rcs_update () {
+ if (-d "$srcdir/.svn") {
+ if (system("svn", "update", "--quiet", $srcdir) != 0) {
+ warn("svn update failed\n");
+ }
+ }
+}
+
+sub rcs_commit ($) {
+ my $message=shift;
+
+ if (-d "$srcdir/.svn") {
+ if (system("svn", "commit", "--quiet", "-m",
+ possibly_foolish_untaint($message), $srcdir) != 0) {
+ warn("svn commit failed\n");
+ }
+ }
+}
+
+sub rcs_add ($) {
+ my $file=shift;
+
+ if (-d "$srcdir/.svn") {
+ my $parent=dirname($file);
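+ # If the file is in a directory that svn doesn't know about yet,
+ # walk up and add the topmost new directory instead; svn add is
+ # recursive.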
+ while (! -d "$srcdir/$parent/.svn") {
+ $file=$parent;
+ $parent=dirname($file);
+ }
+
+ if (system("svn", "add", "--quiet", "$srcdir/$file") != 0) {
+ warn("svn add failed\n");
+ }
+ }
+}
+
+sub rcs_recentchanges ($) {
+ my $num=shift;
+ my @ret;
+
+ # eval a string, not a block: "use" inside an eval block still runs
+ # at compile time, so the modules would not really be optional.
+ eval q{use Date::Parse};
+ eval q{use Time::Duration};
+
+ if (-d "$srcdir/.svn") {
+ my $info=`LANG=C svn info $srcdir`;
+ my ($svn_url)=$info=~/^URL: (.*)$/m;
+
+ my $div=qr/^--------------------+$/;
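+ # Log header lines look like:
+ # r123 | joe | 2006-03-23 13:02:28 -0500 (Thu, 23 Mar 2006) | 2 lines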
+ my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;
+ my $state='start';
+ my ($rev, $user, $when, @pages, $message);
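+ # Parse the log with a small state machine: a divider starts each
+ # entry, followed by a header line, the changed paths, a blank
+ # line, and then the message body.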
+ foreach (`LANG=C svn log -v '$svn_url'`) {
+ chomp;
+ #print STDERR "state: $state ($_)\n";
+ if ($state eq 'start' && /$div/) {
+ $state='header';
+ }
+ elsif ($state eq 'header' && /$infoline/) {
+ $rev=$1;
+ $user=$2;
+ $when=ago(time - str2time($3));
+ }
+ elsif ($state eq 'header' && /^\s+(.*)/) {
+ push @pages, $1;
+ }
+ elsif ($state eq 'header' && /^$/) {
+ $state='body';
+ }
+ elsif ($state eq 'body' && /$div/) {
+ push @ret, {rev => $rev, user => $user,
+ when => $when, message => $message,
+ pages => [@pages]};
+ last if @ret >= $num;
+
+ $state='header';
+ $message=$rev=$user=$when=undef;
+ @pages=();
+ }
+ elsif ($state eq 'body') {
+ $message.="$_<br>\n";
+ }
+ }
+ }
+
+ return @ret;
+}
+
sub prune ($) {
my $file=shift;
unlink($file);
my $dir=dirname($file);
while (rmdir($dir)) {
$dir=dirname($dir);
}
}
sub refresh () {
# Find all source files in the srcdir.
my %exists;
my @files;
find({
no_chdir => 1,
wanted => sub {
- if (/\/\.svn\//) {
+ if (/$wiki_file_prune_regexp/) {
$File::Find::prune=1;
}
- elsif (! -d $_ && ! /\.html$/ && ! /\/\./) {
- my ($f)=/(^[-A-Za-z0-9_.:\/+]+$)/; # untaint
+ elsif (! -d $_) {
+ my ($f)=/$wiki_file_regexp/; # untaint
if (! defined $f) {
warn("skipping bad filename $_\n");
}
},
}, $srcdir);
+ my %rendered;
+
# check for added or removed pages
- my @adddel;
+ my @add;
foreach my $file (@files) {
my $page=pagename($file);
if (! $oldpagemtime{$page}) {
debug("new page $page");
- push @adddel, $page;
+ push @add, $file;
$links{$page}=[];
+ $pagesources{$page}=$file;
}
}
+ my @del;
foreach my $page (keys %oldpagemtime) {
if (! $exists{$page}) {
debug("removing old page $page");
+ push @del, $renderedfiles{$page};
prune($destdir."/".$renderedfiles{$page});
delete $renderedfiles{$page};
$oldpagemtime{$page}=0;
- push @adddel, $page;
+ delete $pagesources{$page};
}
}
# Render any new or changed files.
foreach my $file (@files) {
my $page=pagename($file);
if (! exists $oldpagemtime{$page} ||
mtime("$srcdir/$file") > $oldpagemtime{$page}) {
debug("rendering changed file $file");
render($file);
+ $rendered{$file}=1;
}
}
# if any files were added or removed, check to see if each page
# needs an update due to linking to them
- if (@adddel) {
+ # TODO: inefficient; pages may get rendered above and again here;
+ # problem is the bestlink may have changed and we won't know until
+ # now
+ if (@add || @del) {
FILE: foreach my $file (@files) {
my $page=pagename($file);
- foreach my $p (@adddel) {
+ foreach my $f (@add, @del) {
+ my $p=pagename($f);
foreach my $link (@{$links{$page}}) {
if (bestlink($page, $link) eq $p) {
debug("rendering $file, which links to $p");
render($file);
+ $rendered{$file}=1;
next FILE;
}
}
}
}
}
+
+ # handle linkbacks; if a page has added/removed links, update the
+ # pages it links to
+ # TODO: inefficient; pages may get rendered above and again here;
+ # problem is the linkbacks could be wrong in the first pass render
+ # above
+ if (%rendered) {
+ my %linkchanged;
+ foreach my $file (keys %rendered, @del) {
+ my $page=pagename($file);
+ if (exists $links{$page}) {
+ foreach my $link (@{$links{$page}}) {
+ $link=bestlink($page, $link);
+ if (length $link &&
+ (! exists $oldlinks{$page} ||
+ ! grep { $_ eq $link } @{$oldlinks{$page}})) {
+ $linkchanged{$link}=1;
+ }
+ }
+ }
+ if (exists $oldlinks{$page}) {
+ foreach my $link (@{$oldlinks{$page}}) {
+ $link=bestlink($page, $link);
+ if (length $link &&
+ (! exists $links{$page} ||
+ ! grep { $_ eq $link } @{$links{$page}})) {
+ $linkchanged{$link}=1;
+ }
+ }
+ }
+ }
+ foreach my $link (keys %linkchanged) {
+ my $linkfile=$pagesources{$link};
+ if (defined $linkfile) {
+ debug("rendering $linkfile, to update its linkbacks");
+ render($linkfile);
+ }
+ }
+ }
+}
+
+# Generates a C wrapper program for running ikiwiki in a specific way.
+# The wrapper may be safely made suid.
+sub gen_wrapper ($$) {
+ my ($svn, $rebuild)=@_;
+
+ eval q{use Cwd 'abs_path'};
+ $srcdir=abs_path($srcdir);
+ $destdir=abs_path($destdir);
+ my $this=abs_path($0);
+ if (! -x $this) {
+ error("$this doesn't seem to be executable");
+ }
+
+ my @params=($srcdir, $destdir, "--wikiname=$wikiname");
+ push @params, "--verbose" if $verbose;
+ push @params, "--rebuild" if $rebuild;
+ push @params, "--nosvn" if !$svn;
+ push @params, "--cgi" if $cgi;
+ push @params, "--url=$url" if $url;
+ push @params, "--cgiurl=$cgiurl" if $cgiurl;
+ my $params=join(" ", @params);
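+ # Build the execl() argument list: the program path, then argv[0]
+ # (hence $this twice), then the parameters, NULL-terminated.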
+ my $call='';
+ foreach my $p ($this, $this, @params) {
+ $call.=qq{"$p", };
+ }
+ $call.="NULL";
+
+ my @envsave;
+ push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
+ CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi;
+ my $envsave="";
+ foreach my $var (@envsave) {
+ $envsave.=<<"EOF"
+ if ((s=getenv("$var")))
+ asprintf(&newenviron[i++], "%s=%s", "$var", s);
+EOF
+ }
+
+ open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");
+ print OUT <<"EOF";
+/* A wrapper for ikiwiki, can be safely made suid. */
+#define _GNU_SOURCE
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <string.h>
+
+extern char **environ;
+
+int main (int argc, char **argv) {
+ /* Sanitize environment. */
+ char *s;
+ char *newenviron[$#envsave+3];
+ int i=0;
+$envsave
+ newenviron[i++]="HOME=$ENV{HOME}";
+ newenviron[i]=NULL;
+ environ=newenviron;
+
+ if (argc == 2 && strcmp(argv[1], "--params") == 0) {
+ printf("$params\\n");
+ exit(0);
+ }
+
+ execl($call);
+ perror("failed to run $this");
+ exit(1);
}
+EOF
+ close OUT;
+ if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
+ error("failed to compile ikiwiki-wrap.c");
+ }
+ unlink("ikiwiki-wrap.c");
+ print "successfully generated ikiwiki-wrap\n";
+ exit 0;
+}
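+# For example, running "ikiwiki --wrapper srcdir destdir" compiles an
+# ikiwiki-wrap with those parameters baked in; adding --cgi and --url
+# produces a wrapper suitable for use as a CGI program.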
+
+sub cgi () {
+ eval q{use CGI};
+ my $q=CGI->new;
-loadindex();
-refresh();
-saveindex();
+ my $do=$q->param('do');
+ if (! defined $do || ! length $do) {
+ error("\"do\" parameter missing");
+ }
+
+ if ($do eq 'recentchanges') {
+ my $list="<ul>\n";
+ foreach my $change (rcs_recentchanges(100)) {
+ $list.="<li>";
+ foreach my $page (@{$change->{pages}}) {
+ $list.=$page." ";
+ }
+ $list.="<br>\n";
+ $list.="changed ".$change->{when}." by ".
+ htmllink("index", $change->{user}).
+ ": <i>".$change->{message}."</i><br>\n";
+ $list.="</li>\n";
+ }
+ $list.="</ul>\n";
+
+ print $q->header,
+ $q->start_html("RecentChanges"),
+ $q->h1("<a href=\"$url\">$wikiname</a>/ RecentChanges"),
+ $list,
+ $q->end_html;
+ return;
+ }
+
+ my ($page)=$q->param('page')=~/$wiki_file_regexp/;
+ if (! defined $page || ! length $page || $page ne $q->param('page') ||
+ $page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
+ error("bad page name");
+ }
+ $page=lc($page);
+
+ my $action=$q->request_uri;
+ $action=~s/\?.*//;
+
+ if ($do eq 'create') {
+ if (exists $pagesources{lc($page)}) {
+ # hmm, someone else made the page in the meantime?
+ print $q->redirect("$url/".htmlpage($page));
+ return;
+ }
+
+ my @page_locs;
+ my ($from)=$q->param('from')=~/$wiki_file_regexp/;
+ if (! defined $from || ! length $from ||
+ $from ne $q->param('from') ||
+ $from=~/$wiki_file_prune_regexp/ || $from=~/^\//) {
+ @page_locs=$page;
+ }
+ else {
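+ # Offer to create the page in each directory from the linking
+ # page's location up to the wiki root; e.g. from=a/b/c yields
+ # a/b/page, a/b/c/page, a/page, and page.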
+ my $dir=$from."/";
+ $dir=~s![^/]+/$!!;
+ push @page_locs, $dir.$page;
+ push @page_locs, "$from/$page";
+ while (length $dir) {
+ $dir=~s![^/]+/$!!;
+ push @page_locs, $dir.$page;
+ }
+ }
+
+ $q->param("do", "save");
+ print $q->header,
+ $q->start_html("Creating $page"),
+ $q->h1("<a href=\"$url\">$wikiname</a>/ Creating $page"),
+ $q->start_form(-action => $action),
+ $q->hidden('do'),
+ "Select page location:",
+ $q->popup_menu('page', \@page_locs),
+ $q->textarea(-name => 'content',
+ -default => "",
+ -rows => 20,
+ -columns => 80),
+ $q->br,
+ "Optional comment about this change:",
+ $q->br,
+ $q->textfield(-name => "comments", -size => 80),
+ $q->br,
+ $q->submit("Save Page"),
+ $q->end_form,
+ $q->end_html;
+ }
+ elsif ($do eq 'edit') {
+ my $content="";
+ if (exists $pagesources{lc($page)}) {
+ $content=readfile("$srcdir/$pagesources{lc($page)}");
+ $content=~s/\n/\r\n/g;
+ }
+ $q->param("do", "save");
+ print $q->header,
+ $q->start_html("Editing $page"),
+ $q->h1("<a href=\"$url\">$wikiname</a>/ Editing $page"),
+ $q->start_form(-action => $action),
+ $q->hidden('do'),
+ $q->hidden('page'),
+ $q->textarea(-name => 'content',
+ -default => $content,
+ -rows => 20,
+ -columns => 80),
+ $q->br,
+ "Optional comment about this change:",
+ $q->br,
+ $q->textfield(-name => "comments", -size => 80),
+ $q->br,
+ $q->submit("Save Page"),
+ $q->end_form,
+ $q->end_html;
+ }
+ elsif ($do eq 'save') {
+ my $file=$page.$default_pagetype;
+ my $newfile=1;
+ if (exists $pagesources{lc($page)}) {
+ $file=$pagesources{lc($page)};
+ $newfile=0;
+ }
+
+ my $content=$q->param('content');
+ $content=~s/\r\n/\n/g;
+ $content=~s/\r/\n/g;
+ writefile("$srcdir/$file", $content);
+
+ my $message="web commit from $ENV{REMOTE_ADDR}";
+ if (defined $q->param('comments')) {
+ $message.=": ".$q->param('comments');
+ }
+
+ if ($svn) {
+ if ($newfile) {
+ rcs_add($file);
+ }
+ # presumably the commit will trigger an update
+ # of the wiki
+ rcs_commit($message);
+ }
+ else {
+ refresh();
+ }
+
+ print $q->redirect("$url/".htmlpage($page));
+ }
+ else {
+ error("unknown do parameter");
+ }
+}
+
+my $rebuild=0;
+my $wrapper=0;
+if (grep /^-/, @ARGV) {
+ eval q{use Getopt::Long};
+ GetOptions(
+ "wikiname=s" => \$wikiname,
+ "verbose|v" => \$verbose,
+ "rebuild" => \$rebuild,
+ "wrapper" => \$wrapper,
+ "svn!" => \$svn,
+ "cgi" => \$cgi,
+ "url=s" => \$url,
+ "cgiurl=s" => \$cgiurl,
+ ) || usage();
+}
+usage() unless @ARGV == 2;
+$srcdir=possibly_foolish_untaint(shift);
+$destdir=possibly_foolish_untaint(shift);
+
+if ($cgi && ! length $url) {
+ error("Must specify url to wiki with --url when using --cgi");
+}
+
+gen_wrapper($svn, $rebuild) if $wrapper;
+memoize('pagename');
+memoize('bestlink');
+loadindex() unless $rebuild;
+if ($cgi) {
+ cgi();
+}
+else {
+ rcs_update() if $svn;
+ refresh();
+ saveindex();
+}