$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
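# Markdown.pl only defines its functions and returns (rather than running
# as a standalone filter) when it believes it is loaded as a blosxom
# plugin, so fake up the version variable it checks before the "do" below.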
$blosxom::version="is a proper perl module too much to ask?";
do "/usr/bin/markdown";
my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
    %pagesources);
my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
my $wiki_file_prune_regexp=qr!((^|/)\.svn/|\.\.)!;
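# Examples: "[[SandBox]]" is a wikilink capturing "SandBox";
# "docs/index.mdwn" is an acceptable wiki filename; ".svn/" directories
# and ".." path components are pruned from consideration.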
my $default_pagetype=".mdwn";
die "usage: ikiwiki [options] source dest\n";
print "Content-type: text/html\n\n";
print "@_\n" if $verbose;
return (stat($page))[9];
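# Blanket-untaint a value: under "perl -T" the regexp match below strips
# the taint flag without doing any real validation, hence the name.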
sub possibly_foolish_untaint ($) {
my ($untainted)=$tainted=~/(.*)/;
if ($page =~ /\.mdwn$/) {
my $type=pagetype($file);
$page=~s/\Q$type\E$// unless $type eq 'unknown';
open(IN, "<", $file) || error("failed to read $file: $!");
my $dir=dirname($file);
foreach my $s (split(m!/+!, $dir)) {
mkdir($d) || error("failed to create directory $d: $!");
open(OUT, ">", $file) || error("failed to write $file: $!");
while ($content =~ /$wiki_link_regexp/g) {
# Given a page and the text of a link on the page, determine which
# existing page that link best points to. Prefers pages under a
# subdirectory with the same name as the source page; failing that,
# walks down the directory tree to the base looking for matching pages.
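# For example, on page "foo/bar" a link "baz" resolves to the first of
# "foo/bar/baz", "foo/baz", or "baz" that exists in %links.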
$l.="/" if length $l;
if (exists $links{$l}) {
#debug("for $page, \"$link\", use $l");
} while $cwd=~s!/?[^/]+$!!;
#print STDERR "warning: page $page, broken link: $link\n";
sub isinlinableimage ($) {
$file=~/\.(png|gif|jpg|jpeg)$/;
my $bestlink=bestlink($page, $link);
return $link if $page eq $bestlink;
# TODO BUG: %renderedfiles may not have it, if the linked-to page
# was also added and isn't yet rendered! Note that this bug is
# masked by the bug mentioned below that makes all new files
# be rendered twice.
if (! grep { $_ eq $bestlink } values %renderedfiles) {
$bestlink=htmlpage($bestlink);
if (! grep { $_ eq $bestlink } values %renderedfiles) {
return "<a href=\"?\">?</a>$link";
$bestlink=File::Spec->abs2rel($bestlink, dirname($page));
if (isinlinableimage($bestlink)) {
return "<img src=\"$bestlink\">";
return "<a href=\"$bestlink\">$link</a>";
$content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;
if ($type eq '.mdwn') {
return Markdown::Markdown($content);
error("htmlization of $type not supported");
foreach my $p (keys %links) {
next if bestlink($page, $p) eq $page;
if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
# Trim common dir prefixes from both pages.
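# e.g. a linkback from "foo/baz" to page "foo/bar" drops the shared
# "foo/" prefix, so the displayed link text is just "baz".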
my $page_trimmed=$page;
1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
$p_trimmed=~s/^\Q$dir\E// &&
$page_trimmed=~s/^\Q$dir\E//;
push @links, "<a href=\"$href\">$p_trimmed</a>";
$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
my $title=basename($page);
foreach my $dir (reverse split("/", $page)) {
if (length($pagelink)) {
$pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
$path=~s/\.\.\/$/index.html/;
$pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
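# At this point $pagelink is a breadcrumb trail, e.g. for page
# "foo/bar/baz" it reads "$wikiname/ foo/ bar/ baz", with each leading
# component linked to its own page.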
$content="<html>\n<head><title>$title</title></head>\n<body>\n".
"<h1>$pagelink</h1>\n".
"</body>\n</html>\n";
my $type=pagetype($file);
my $content=readfile("$srcdir/$file");
if ($type ne 'unknown') {
my $page=pagename($file);
$links{$page}=[findlinks($content)];
$content=linkify($content, $file);
$content=htmlize($type, $content);
$content=linkbacks($content, $page);
$content=finalize($content, $page);
writefile("$destdir/".htmlpage($page), $content);
$oldpagemtime{$page}=time;
$renderedfiles{$page}=htmlpage($page);
writefile("$destdir/$file", $content);
$oldpagemtime{$file}=time;
$renderedfiles{$file}=$file;
open(IN, "<", "$srcdir/.index") || return;
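# Each line of .index records one page:
#   <mtime> <source file> <rendered file> <link targets ...>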
$_=possibly_foolish_untaint($_);
my ($mtime, $file, $rendered, @links)=split(' ', $_);
my $page=pagename($file);
$pagesources{$page}=$file;
$oldpagemtime{$page}=$mtime;
$oldlinks{$page}=[@links];
$links{$page}=[@links];
$renderedfiles{$page}=$rendered;
open(OUT, ">", "$srcdir/.index") || error("cannot write to .index: $!");
foreach my $page (keys %oldpagemtime) {
print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
join(" ", @{$links{$page}})."\n"
if $oldpagemtime{$page};
if (-d "$srcdir/.svn") {
if (system("svn", "update", "--quiet", $srcdir) != 0) {
warn("svn update failed\n");
my $dir=dirname($file);
while (rmdir($dir)) {
# Find existing pages.
if (/$wiki_file_prune_regexp/) {
$File::Find::prune=1;
elsif (! -d $_ && ! /\.html$/ && ! /\/\./) {
my ($f)=/$wiki_file_regexp/; # untaint
warn("skipping bad filename $_\n");
$f=~s/^\Q$srcdir\E\/?//;
$exists{pagename($f)}=1;
# check for added or removed pages
foreach my $file (@files) {
my $page=pagename($file);
if (! $oldpagemtime{$page}) {
debug("new page $page");
$pagesources{$page}=$file;
foreach my $page (keys %oldpagemtime) {
if (! $exists{$page}) {
debug("removing old page $page");
push @del, $renderedfiles{$page};
prune($destdir."/".$renderedfiles{$page});
delete $renderedfiles{$page};
$oldpagemtime{$page}=0;
delete $pagesources{$page};
# render any updated files
foreach my $file (@files) {
my $page=pagename($file);
if (! exists $oldpagemtime{$page} ||
mtime("$srcdir/$file") > $oldpagemtime{$page}) {
debug("rendering changed file $file");
# if any files were added or removed, check to see if each page
# needs an update due to linking to them
# TODO: inefficient; pages may get rendered above and again here;
# problem is the bestlink may have changed and we won't know until now.
FILE: foreach my $file (@files) {
my $page=pagename($file);
foreach my $f (@add, @del) {
foreach my $link (@{$links{$page}}) {
if (bestlink($page, $link) eq $p) {
debug("rendering $file, which links to $p");
# handle linkbacks; if a page has added/removed links, update the
# pages it links to.
# TODO: inefficient; pages may get rendered above and again here;
# problem is the linkbacks could be wrong in the first pass render
foreach my $file (keys %rendered, @del) {
my $page=pagename($file);
if (exists $links{$page}) {
foreach my $link (@{$links{$page}}) {
$link=bestlink($page, $link);
! exists $oldlinks{$page} ||
! grep { $_ eq $link } @{$oldlinks{$page}}) {
$linkchanged{$link}=1;
if (exists $oldlinks{$page}) {
foreach my $link (@{$oldlinks{$page}}) {
$link=bestlink($page, $link);
! exists $links{$page} ||
! grep { $_ eq $link } @{$links{$page}}) {
$linkchanged{$link}=1;
foreach my $link (keys %linkchanged) {
my $linkfile=$pagesources{$link};
if (defined $linkfile) {
debug("rendering $linkfile, to update its linkbacks");
# Generates a C wrapper program for running ikiwiki in a specific way.
# The wrapper may be safely made suid.
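# (The wrapper execs this script with the current options baked in as
# constants, after copying only a whitelist of environment variables,
# which is what makes installing it suid reasonable.)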
sub gen_wrapper ($$) {
my ($offline, $rebuild)=@_;
eval q{use Cwd 'abs_path'};
$srcdir=abs_path($srcdir);
$destdir=abs_path($destdir);
my $this=abs_path($0);
error("$this doesn't seem to be executable");
my $call=qq{"$this", "$this", "$srcdir", "$destdir", "--wikiname=$wikiname"};
$call.=', "--verbose"' if $verbose;
$call.=', "--rebuild"' if $rebuild;
$call.=', "--offline"' if $offline;
$call.=', "--cgi"' if $cgi;
$call.=', "--url='.$url.'"' if $url;
# For CGI we need all these environment variables.
my @envsave=qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE};
foreach my $var (@envsave) {
if ((s=getenv("$var")))
asprintf(&newenviron[i++], "%s=%s", "$var", s);
open(OUT, ">", "ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");
/* A wrapper for ikiwiki, can be safely made suid. */
extern char **environ;
/* Sanitize environment. */
char *newenviron[$#envsave+2];
perror("failed to run $this");
if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
error("failed to compile ikiwiki-wrap.c");
unlink("ikiwiki-wrap.c");
print "successfully generated ikiwiki-wrap\n";
my $do=$q->param('do');
if (! defined $do || ! length $do) {
error("\"do\" parameter missing");
my ($page)=$q->param('page')=~/$wiki_file_regexp/; # untaint
if (! defined $page || ! length $page || $page ne $q->param('page') ||
$page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
error("bad page name");
my $action=$q->request_uri;
if (exists $pagesources{lc($page)}) {
$content=readfile("$srcdir/$pagesources{lc($page)}");
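# Textarea form submissions use CRLF line endings, so present the page
# the same way; the save handler below normalizes back to LF.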
$content=~s/\n/\r\n/g;
$q->param("do", "save");
$q->start_html("$wikiname: Editing $page"),
$q->h1("$wikiname: Editing $page"),
$q->start_form(-action => $action),
$q->textarea(-name => 'content',
-default => $content,
$q->submit("Save Changes"),
# TODO: Cancel button returns to page.
# TODO: Preview button.
# TODO: Commit message field.
# TODO: Conflict prevention.
elsif ($do eq 'save') {
my $file=$page.$default_pagetype;
if (exists $pagesources{lc($page)}) {
$file=$pagesources{lc($page)};
my $content=$q->param('content');
$content=~s/\r\n/\n/g;
writefile("$srcdir/$file", $content);
print $q->redirect("$url/".htmlpage($page));
error("unknown do parameter");
if (grep /^-/, @ARGV) {
eval q{use Getopt::Long};
604 "wikiname=s" => \$wikiname,
605 "verbose|v" => \$verbose,
606 "rebuild" => \$rebuild,
607 "wrapper" => \$wrapper,
608 "offline" => \$offline,
613 usage() unless @ARGV == 2;
614 ($srcdir) = possibly_foolish_untaint(shift);
615 ($destdir) = possibly_foolish_untaint(shift);
617 if ($cgi && ! length $url) {
618 error("Must specify url to wiki with --url when using --cgi");
621 gen_wrapper($offline, $rebuild) if $wrapper;
624 loadindex() unless $rebuild;
629 update() unless $offline;
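# Example invocation (hypothetical paths):
#   ikiwiki --verbose --wikiname=MyWiki ~/wiki ~/public_html/wiki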