# NOTE(review): this is a line-numbered, sampled listing -- the number at the
# start of each line is the original file's line number; gaps between numbers
# are elided source lines.
# Pin PATH to a safe value before any external commands run (taint hygiene).
9 $ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
# Markdown.pl is a blosxom plugin rather than a module; presumably setting
# $blosxom::version satisfies its plugin check so `do` can load it as a
# plain library -- TODO confirm against Markdown.pl.
12 $blosxom::version="is a proper perl module too much to ask?";
13 do "/usr/bin/markdown";
# Wiki-wide state: source/destination dirs plus per-page link, mtime, and
# rendered-file caches (the declaration continues on elided lines).
16 my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
# WikiLink syntax: [[target]], no whitespace inside the brackets; capture
# group 1 is the link target.
18 my $link=qr/\[\[([^\s]+)\]\]/;
# usage(): abort with a usage summary (enclosing sub elided in this listing).
23 die "usage: ikiwiki [options] source dest\n";
# debug(): print a message only when --verbose was given.
31 print "@_\n" if $verbose;
# mtime(): element 9 of stat() is the last-modification time.
37 return (stat($page))[9];
# Launder a tainted value so taint mode accepts it.  The catch-all /(.*)/
# clears the taint flag without any real validation -- the sub's name is a
# deliberate warning to callers.  ($tainted is presumably assigned from @_
# on the elided line 41 -- confirm against full source.)
40 sub possibly_foolish_untaint ($) {
42 my ($untainted)=$tainted=~/(.*)/;
# pagetype(): only .mdwn (markdown) files are recognized as wiki pages.
63 if ($page =~ /\.mdwn$/) {
# pagename(): derive the page name from a source file by stripping the
# type extension.
74 my $type=pagetype($file);
# NOTE(review): in "\Q$type\E*$" the * quantifies only the extension's final
# character, so e.g. "foo.mdw" would also be stripped; "\Q$type\E$" looks
# like the intent -- confirm before changing.
76 $page=~s/\Q$type\E*$// unless $type eq 'unknown';
# readpage(): NOTE(review) legacy 2-arg open with a bareword handle; an
# interpolated filename permits mode injection if the name were untrusted.
90 open (PAGE, "$srcdir/$page") || error("failed to read $page: $!");
# writepage(): create each missing component of the destination directory
# before writing the output file.
100 my $dir=dirname("$destdir/$page");
103 foreach my $s (split(m!/+!, $dir)) {
# $d presumably accumulates the path one component per iteration on the
# elided lines -- confirm against full source.
106 mkdir($d) || error("failed to create directory $d: $!");
# Write the rendered page (2-arg open, bareword handle -- legacy style).
111 open (PAGE, ">$destdir/$page") || error("failed to write $page: $!");
# findlinks(): collect every [[link]] target via repeated /g matching.
120 while ($content =~ /$link/g) {
126 # Given a page and the text of a link on the page, determine which existing
127 # page that link best points to. Prefers pages under a subdirectory with
128 # the same name as the source page; failing that, goes down the directory
129 # tree to the base looking for matching pages.
# Build the candidate "dir/link" page name for the current search level.
137 $l.="/" if length $l;
# A page "exists" for link resolution when it has an entry in %links.
140 if (exists $links{$l}) {
141 #debug("for $page, \"$link\", use $l");
# Pop the last path component off $cwd and retry, until the path is empty.
144 } while $cwd=~s!/?[^/]+$!!;
146 #print STDERR "warning: page $page, broken link: $link\n";
# True when the filename has a web-displayable image extension; the match's
# boolean result is the sub's implicit return value.
150 sub isinlinableimage ($) {
153 $file=~/\.(png|gif|jpg|jpeg)$/;
# htmllink(): turn one [[link]] occurring on $page into HTML.
160 my $bestlink=bestlink($page, $link);
# A link that resolves to the page itself renders as plain text.
162 return $link if $page eq $bestlink;
164 # TODO BUG: %renderedfiles may not have it, if the linked to page
165 # was also added and isn't yet rendered! Note that this bug is
166 # masked by the bug mentioned below that makes all new files
# If the target isn't a known rendered file as-is, try its .html page name.
168 if (! grep { $_ eq $bestlink } values %renderedfiles) {
169 $bestlink=htmlpage($bestlink);
# Still unknown: emit a "?" placeholder anchor followed by the raw text.
171 if (! grep { $_ eq $bestlink } values %renderedfiles) {
172 return "<a href=\"?\">?</a>$link"
# Make the href relative to the linking page's directory.
175 $bestlink=File::Spec->abs2rel($bestlink, dirname($page));
# Images are inlined rather than turned into anchors.
177 if (isinlinableimage($bestlink)) {
178 return "<img src=\"$bestlink\">";
180 return "<a href=\"$bestlink\">$link</a>";
# linkify(): replace every [[link]] in the content with its HTML form;
# /e evaluates htmllink() per match, with $1 being the link target.
187 $content =~ s/$link/htmllink(pagename($file), $1)/eg;
# htmlize(): dispatch on the source type; only markdown is supported.
196 if ($type eq '.mdwn') {
197 return Markdown::Markdown($content);
# Any other type is a fatal error.
200 error("htmlization of $type not supported");
# linkbacks(): find every page that links to $page and append them to the
# content as a "Links:" footer.
209 foreach my $p (keys %links) {
# Skip candidates whose name resolves to this page itself.
210 next if bestlink($page, $p) eq $page;
# $p links back when any of its recorded links resolves to $page.
211 if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
212 my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
214 # Trim common dir prefixes from both pages.
216 my $page_trimmed=$page;
# Strip one shared leading "dir/" per iteration from both names, leaving
# the shortest distinguishing suffix as the displayed link text.
218 1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
220 $p_trimmed=~s/^\Q$dir\E// &&
221 $page_trimmed=~s/^\Q$dir\E//;
223 push @links, "<a href=\"$href\">$p_trimmed</a>";
# Only add the footer when at least one linkback exists.
227 $content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
# finalize(): wrap the rendered body in a minimal HTML document whose
# heading is a breadcrumb of links through the page's parent directories.
235 my $title=basename($page);
# Build the breadcrumb from the deepest path component outward.
240 foreach my $dir (reverse split("/", $page)) {
241 if (length($pagelink)) {
242 $pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
# The top-level crumb points at the wiki's index page, labeled $wikiname.
249 $path=~s/\.\.\/$/index.html/;
250 $pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
252 $content="<html>\n<head><title>$title</title></head>\n<body>\n".
253 "<h1>$pagelink</h1>\n".
255 "</body>\n</html>\n";
# render(): turn one source file into output.  Recognized page types go
# through the full pipeline; anything else is copied through unchanged
# (the else-branch, original lines 281-283).
263 my $type=pagetype($file);
264 my $content=readpage($file);
265 if ($type ne 'unknown') {
266 my $page=pagename($file);
# Record the page's outgoing links before the content is rewritten.
268 $links{$page}=[findlinks($content)];
# Pipeline: wikilinks -> HTML -> linkback footer -> page chrome.
270 $content=linkify($content, $file);
271 $content=htmlize($type, $content);
272 $content=linkbacks($content, $page);
273 $content=finalize($content, $page);
275 writepage(htmlpage($page), $content);
# Bookkeeping used by the .index file and change detection.
276 $oldpagemtime{$page}=time;
277 $renderedfiles{$page}=htmlpage($page);
# Non-page files are written out verbatim, keyed by filename.
281 writepage($file, $content);
282 $oldpagemtime{$file}=time;
283 $renderedfiles{$file}=$file;
# loadindex(): rebuild in-memory state from $srcdir/.index; silently a
# no-op when the index does not exist yet (e.g. first run).
288 open (IN, "$srcdir/.index") || return;
# The index was written by this program, so untainting it wholesale is
# considered acceptable here.
290 $_=possibly_foolish_untaint($_);
# Index line format: mtime, source file, rendered file, then the links.
292 my ($mtime, $file, $rendered, @links)=split(' ', $_);
293 my $page=pagename($file);
294 $pagesources{$page}=$file;
295 $oldpagemtime{$page}=$mtime;
# Keep two copies of the link list: %oldlinks for later change detection,
# %links as the current working state.
296 $oldlinks{$page}=[@links];
297 $links{$page}=[@links];
298 $renderedfiles{$page}=$rendered;
# saveindex(): persist per-page state, one whitespace-separated line each
# (mirrors the format read back by loadindex).
304 open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
305 foreach my $page (keys %oldpagemtime) {
306 print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
307 join(" ", @{$links{$page}})."\n"
# An mtime of 0 marks a deleted page; omit those from the index.
308 if $oldpagemtime{$page};
# prune(): after deleting a file, remove now-empty parent directories;
# rmdir fails (ending the loop) at the first non-empty directory.
317 my $dir=dirname($file);
318 while (rmdir($dir)) {
324 # Find existing pages.
# Inside a File::Find callback: prune stops descent here (the guarding
# condition is elided -- presumably hidden/VCS dirs; confirm).
331 $File::Find::prune=1;
# Consider only non-directories that aren't rendered output or hidden.
333 elsif (! -d $_ && ! /\.html$/ && ! /\/\./) {
334 my ($f)=/(^[-A-Za-z0-9_.:\/+]+$)/; # untaint
# Names outside the safe character set are skipped with a warning.
336 warn("skipping bad filename $_\n");
# Store paths relative to the source directory.
339 $f=~s/^\Q$srcdir\E\/?//;
341 $exists{pagename($f)}=1;
349 # check for added or removed pages
# New pages: present on disk but with no recorded mtime in the index.
351 foreach my $file (@files) {
352 my $page=pagename($file);
353 if (! $oldpagemtime{$page}) {
354 debug("new page $page");
357 $pagesources{$page}=$file;
# Removed pages: recorded previously but no longer on disk.  Delete the
# rendered output and clear bookkeeping; mtime 0 marks the deletion.
361 foreach my $page (keys %oldpagemtime) {
362 if (! $exists{$page}) {
363 debug("removing old page $page");
364 push @del, $renderedfiles{$page};
365 prune($destdir."/".$renderedfiles{$page});
366 delete $renderedfiles{$page};
367 $oldpagemtime{$page}=0;
368 delete $pagesources{$page};
372 # render any updated files
# A file is (re)rendered when it is unknown to the index or newer on disk
# than the recorded mtime.
373 foreach my $file (@files) {
374 my $page=pagename($file);
376 if (! exists $oldpagemtime{$page} ||
377 mtime("$srcdir/$file") > $oldpagemtime{$page}) {
378 debug("rendering changed file $file");
384 # if any files were added or removed, check to see if each page
385 # needs an update due to linking to them
386 # TODO: inefficient; pages may get rendered above and again here;
387 # problem is the bestlink may have changed and we won't know until
# Re-render any page holding a link that resolves to an added/removed page.
390 FILE: foreach my $file (@files) {
391 my $page=pagename($file);
392 foreach my $f (@add, @del) {
394 foreach my $link (@{$links{$page}}) {
395 if (bestlink($page, $link) eq $p) {
# The FILE label presumably lets elided code `next FILE` once one match
# forces a re-render -- confirm against full source.
396 debug("rendering $file, which links to $p");
406 # handle linkbacks; if a page has added/removed links, update the
408 # TODO: inefficient; pages may get rendered above and again here;
409 # problem is the linkbacks could be wrong in the first pass render
# Compare each re-rendered (or deleted) page's link set against the old one.
413 foreach my $file (keys %rendered, @del) {
414 my $page=pagename($file);
415 if (exists $links{$page}) {
# Links present now but not before: the target's linkbacks changed.
416 foreach my $link (@{$links{$page}}) {
417 $link=bestlink($page, $link);
419 ! exists $oldlinks{$page} ||
420 ! grep { $_ eq $link } @{$oldlinks{$page}}) {
421 $linkchanged{$link}=1;
# Links present before but not now: same check in the other direction.
425 if (exists $oldlinks{$page}) {
426 foreach my $link (@{$oldlinks{$page}}) {
427 $link=bestlink($page, $link);
429 ! exists $links{$page} ||
430 ! grep { $_ eq $link } @{$links{$page}}) {
431 $linkchanged{$link}=1;
# Re-render every page whose incoming-link set changed, if it still has a
# source file.
436 foreach my $link (keys %linkchanged) {
437 my $linkfile=$pagesources{$link};
438 if (defined $linkfile) {
439 debug("rendering $linkfile, to update its linkbacks");
446 # Generates a C wrapper program for running ikiwiki in a specific way.
447 # The wrapper may be safely made suid.
448 sub gen_wrapper ($$) {
449 my ($offline, $rebuild)=@_;
# Resolve everything to absolute paths: the wrapper may run from any cwd.
# NOTE(review): use() inside eval{} still executes at compile time;
# require Cwd; Cwd->import('abs_path') would defer it.
451 eval {use Cwd 'abs_path'};
452 $srcdir=abs_path($srcdir);
453 $destdir=abs_path($destdir);
454 my $this=abs_path($0);
456 error("$this doesn't seem to be executable");
# Build the argument list baked into the generated C source -- presumably
# passed to an exec-family call in the elided heredoc (cf. the perror on
# line 474); the program path is repeated as argv[0] by convention.
459 my $call=qq{"$this", "$this", "$srcdir", "$destdir", "--wikiname=$wikiname"};
460 $call.=', "--verbose"' if $verbose;
461 $call.=', "--rebuild"' if $rebuild;
462 $call.=', "--offline"' if $offline;
# NOTE(review): doubled semicolon at the end of line 464 is stray but harmless.
464 open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");;
466 /* A suid wraper for ikiwiki */
474 perror("failed to run $this");
# Compile the wrapper, then remove the intermediate C source.
479 if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
480 error("failed to compile ikiwiki-wrap.c");
482 unlink("ikiwiki-wrap.c");
483 print "successfully generated ikiwiki-wrap\n";
# update(): refresh the source directory from svn when it is a checkout.
488 if (-d "$srcdir/.svn") {
# List-form system() avoids the shell; failure is non-fatal and the build
# proceeds from the existing working copy.
489 if (system("svn", "update", "--quiet", $srcdir) != 0) {
490 warn("svn update failed\n");
# Main program: option parsing runs only when something option-like appears.
498 if (grep /^-/, @ARGV) {
# NOTE(review): use() inside eval{} executes at compile time regardless.
499 eval {use Getopt::Long};
501 "wikiname=s" => \$wikiname,
502 "verbose|v" => \$verbose,
503 "rebuild" => \$rebuild,
504 "gen-wrapper" => \$gen_wrapper,
505 "offline" => \$offline,
# Exactly two positional arguments remain: source dir and destination dir.
508 usage() unless @ARGV == 2;
509 ($srcdir) = possibly_foolish_untaint(shift);
510 ($destdir) = possibly_foolish_untaint(shift);
512 gen_wrapper($offline, $rebuild) if $gen_wrapper;
515 update() unless $offline;
516 loadindex() unless $rebuild;