# linkify: expand [[WikiLinks]] in a page's content into HTML links.
# In the substitution: $1 is an optional escaping backslash, $2 is the
# optional "text" half of a [[text|page]] link, $3 is the target page.
# An escaped link is re-emitted literally (minus the backslash);
# otherwise htmllink() produces the HTML for it.
# NOTE(review): surrounding lines of this sub are elided in this listing.
9 sub linkify ($$) { #{{{
13 $content =~ s{(\\?)$config{wiki_link_regexp}}{
14 $2 ? ( $1 ? "[[$2|$3]]" : htmllink($page, titlepage($3), 0, 0, pagetitle($2)))
15 : ( $1 ? "[[$3]]" : htmllink($page, titlepage($3)))
# scrubber: return a memoized HTML::Scrubber instance used to sanitize
# untrusted HTML (see htmlize).  Built lazily on first use; the tag and
# attribute whitelists come from the feedparser.org sanitization docs.
23 return $_scrubber if defined $_scrubber;
25 eval q{use HTML::Scrubber};
26 # Lists based on http://feedparser.org/docs/html-sanitization.html
27 $_scrubber = HTML::Scrubber->new(
# Whitelist of allowed tags:
29 a abbr acronym address area b big blockquote br
30 button caption center cite code col colgroup dd del
31 dfn dir div dl dt em fieldset font form h1 h2 h3 h4
32 h5 h6 hr i img input ins kbd label legend li map
33 menu ol optgroup option p pre q s samp select small
34 span strike strong sub sup table tbody td textarea
35 tfoot th thead tr tt u ul var
# Default rule: the attributes below are permitted on any allowed tag;
# everything else is stripped.
37 default => [undef, { map { $_ => 1 } qw{
38 abbr accept accept-charset accesskey action
39 align alt axis border cellpadding cellspacing
40 char charoff charset checked cite class
41 clear cols colspan color compact coords
42 datetime dir disabled enctype for frame
43 headers height href hreflang hspace id ismap
44 label lang longdesc maxlength media method
45 multiple name nohref noshade nowrap prompt
46 readonly rel rev rows rowspan rules scope
47 selected shape size span src start summary
48 tabindex target title type usemap valign
# htmlize: convert page source of the given $type to HTML.  Only
# ".mdwn" (Markdown) is supported; any other type is a fatal error.
# /usr/bin/markdown is not a real Perl module, so it is pulled in with
# do() after faking the $blosxom::version it insists on seeing.
55 sub htmlize ($$) { #{{{
59 if (! $INC{"/usr/bin/markdown"}) {
61 $blosxom::version="is a proper perl module too much to ask?";
63 do "/usr/bin/markdown";
66 if ($type eq '.mdwn') {
67 $content=Markdown::Markdown($content);
70 error("htmlization of $type not supported");
# Optionally strip disallowed tags/attributes from the generated HTML.
73 if ($config{sanitize}) {
74 $content=scrubber()->scrub($content);
# backlinks: return a sorted list of { url, page } hashes, one per page
# that links to $page.  url is relative to $page's directory; the
# displayed page name has any leading directory components shared with
# $page trimmed off to keep it short.
80 sub backlinks ($) { #{{{
84 foreach my $p (keys %links) {
# A page linking to itself doesn't count as a backlink.
85 next if bestlink($page, $p) eq $page;
86 if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
87 my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
89 # Trim common dir prefixes from both pages.
91 my $page_trimmed=$page;
# Repeatedly strip the same leading "dir/" component from both
# names for as long as they share one.
93 1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
95 $p_trimmed=~s/^\Q$dir\E// &&
96 $page_trimmed=~s/^\Q$dir\E//;
98 push @links, { url => $href, page => $p_trimmed };
# Sort by display name for a stable, readable backlink list.
102 return sort { $a->{page} cmp $b->{page} } @links;
# parentlinks: build the breadcrumb trail for $page — a list of
# { url, page } hashes from the wiki root down to the page's parent.
# Directories are walked in reverse and unshifted, so the wiki name
# ends up first.  NOTE(review): $path bookkeeping is elided here;
# presumably it accumulates "../" per level — confirm.
105 sub parentlinks ($) { #{{{
112 foreach my $dir (reverse split("/", $page)) {
115 unshift @ret, { url => "$path$dir.html", page => $dir };
# The root crumb links to the top of the wiki ("." when already there).
121 unshift @ret, { url => length $path ? $path : ".", page => $config{wikiname} };
# rsspage: the rss feed filename for a page.  Used as the writefile
# target in preprocess_inline and as the rssurl leaf in genpage.
# NOTE(review): body elided in this listing — confirm the exact naming.
125 sub rsspage ($) { #{{{
# preprocess: expand [[directive param="value" ...]] preprocessor
# directives in $content.  A leading backslash escapes a directive so
# it is re-emitted literally; unknown directives are replaced with a
# "[[bad directive ...]]" marker in the output.
131 sub preprocess ($$) { #{{{
# Dispatch table of supported directives.
135 my %commands=(inline => \&preprocess_inline);
141 if (length $escape) {
142 "[[$command $params]]";
144 elsif (exists $commands{$command}) {
# Parse param="value" pairs.  NOTE(review): this only matches
# non-empty double-quoted values, so param="" is silently dropped —
# confirm whether that is intended.
146 while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) {
149 $commands{$command}->($page, %params);
152 "[[bad directive $command]]";
# The /e substitution hands ($escape, $command, $params) to $handle.
156 $content =~ s{(\\?)$config{wiki_processor_regexp}}{$handle->($1, $2, $3)}eg;
# blog_list: return the pages matching $globlist, newest first (by
# creation time), truncated to at most $maxitems entries; a false
# $maxitems means no limit.
160 sub blog_list ($$) { #{{{
165 foreach my $page (keys %pagesources) {
166 if (globlist_match($page, $globlist)) {
# Newest-first by ctime.
171 @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
172 return @list if ! $maxitems || @list <= $maxitems;
173 return @list[0..$maxitems - 1];
# get_inline_content: the htmlized content of $page, with its wikilinks
# resolved relative to $parentpage so they stay correct when the
# content is inlined there.  The fall-through for unknown page types is
# elided in this listing.
176 sub get_inline_content ($$) { #{{{
177 my $parentpage=shift;
180 my $file=$pagesources{$page};
181 my $type=pagetype($file);
182 if ($type ne 'unknown') {
183 return htmlize($type, linkify(readfile(srcfile($file)), $parentpage));
# preprocess_inline: handler for the [[inline ...]] directive.  Renders
# a blog-style list of the pages matching params{pages} into
# $parentpage, optionally with a new-post form and an rss feed.
# params: pages (globlist, required), archive (default "no"; "no" means
# inline full content, otherwise titles only), show (item limit),
# rootpage (enables the blogpost form).
190 sub preprocess_inline ($@) { #{{{
191 my $parentpage=shift;
194 if (! exists $params{pages}) {
197 if (! exists $params{archive}) {
198 $params{archive}="no";
200 if (! exists $params{show} && $params{archive} eq "no") {
# Record the globlist so refresh() knows this page inlines others and
# must be re-rendered when any of them change.
203 $inlinepages{$parentpage}=$params{pages};
# Optional "post a new entry" form for blog-style pages.
207 if (exists $params{rootpage}) {
208 my $formtemplate=HTML::Template->new(blind_cache => 1,
209 filename => "$config{templatedir}/blogpost.tmpl");
210 $formtemplate->param(cgiurl => $config{cgiurl});
211 $formtemplate->param(rootpage => $params{rootpage});
212 my $form=$formtemplate->output;
# archive mode shows titles only; otherwise full inlined content.
216 my $template=HTML::Template->new(blind_cache => 1,
217 filename => (($params{archive} eq "no")
218 ? "$config{templatedir}/inlinepage.tmpl"
219 : "$config{templatedir}/inlinepagetitle.tmpl"));
222 foreach my $page (blog_list($params{pages}, $params{show})) {
# Never inline the page into itself.
223 next if $page eq $parentpage;
225 $template->param(pagelink => htmllink($parentpage, $page));
226 $template->param(content => get_inline_content($parentpage, $page))
227 if $params{archive} eq "no";
228 $template->param(ctime => scalar(gmtime($pagectime{$page})));
229 $ret.=$template->output;
232 # TODO: should really add this to renderedfiles and call
233 # check_overwrite, but currently renderedfiles
234 # only supports listing one file per page.
236 writefile(rsspage($parentpage), $config{destdir},
237 genrss($parentpage, @pages));
# genpage: wrap rendered $content in the page template and return the
# complete HTML page.  Fills in title, navigation (parentlinks,
# backlinks, discussion), optional cgi/history/search/rss urls, and the
# source file's mtime.
243 sub genpage ($$$) { #{{{
248 my $title=pagetitle(basename($page));
250 my $template=HTML::Template->new(blind_cache => 1,
251 filename => "$config{templatedir}/page.tmpl");
# Edit/prefs (and, conditionally, recentchanges) links only make sense
# when a cgi url is configured.
253 if (length $config{cgiurl}) {
254 $template->param(editurl => cgiurl(do => "edit", page => $page));
255 $template->param(prefsurl => cgiurl(do => "prefs"));
257 $template->param(recentchangesurl => cgiurl(do => "recentchanges"));
# historyurl is a template: "[[file]]" is replaced by the source file.
261 if (length $config{historyurl}) {
262 my $u=$config{historyurl};
263 $u=~s/\[\[file\]\]/$pagesources{$page}/g;
264 $template->param(historyurl => $u);
266 if ($config{hyperestraier}) {
267 $template->param(hyperestraierurl => cgiurl());
# Only pages that inline others get an rss feed link.
270 if ($config{rss} && $inlinepages{$page}) {
271 $template->param(rssurl => rsspage(basename($page)));
276 wikiname => $config{wikiname},
277 parentlinks => [parentlinks($page)],
279 backlinks => [backlinks($page)],
# The discussion link is created even if the page doesn't exist yet.
280 discussionlink => htmllink($page, "Discussion", 1, 1),
281 mtime => scalar(gmtime($mtime)),
282 styleurl => styleurl($page),
285 return $template->output;
# date_822: format a unix timestamp as an RFC 822 date, the format
# required for RSS <pubDate>.  Uses localtime, so %z emits the local
# timezone offset.
288 sub date_822 ($) { #{{{
292 return POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
# absolute_urls: crudely rewrite relative <a href> and <img src>
# attributes in $content to absolute urls under $url, since feed
# readers resolve relative urls unpredictably.
295 sub absolute_urls ($$) { #{{{
296 # sucky sub because rss sucks
# NOTE(review): the (?!http:\/\/) guard only exempts plain http:// —
# https://, mailto:, ftp:// etc. would wrongly get $url prepended.
# Confirm whether other schemes can occur in page content.
302 $content=~s/<a\s+href="(?!http:\/\/)([^"]+)"/<a href="$url$1"/ig;
303 $content=~s/<img\s+src="(?!http:\/\/)([^"]+)"/<img src="$url$1"/ig;
# genrss: build the rss feed for $page out of the already-rendered
# @pages and return the feed text (from the rsspage template).
307 sub genrss ($@) { #{{{
# Base url used to absolutize relative links in item content.
311 my $url="$config{url}/".htmlpage($page);
313 my $template=HTML::Template->new(blind_cache => 1,
314 filename => "$config{templatedir}/rsspage.tmpl");
317 foreach my $p (@pages) {
319 itemtitle => pagetitle(basename($p)),
320 itemurl => "$config{url}/$renderedfiles{$p}",
321 itempubdate => date_822($pagectime{$p}),
322 itemcontent => absolute_urls(get_inline_content($page, $p), $url),
# Pages that haven't been rendered yet can't appear in the feed.
323 } if exists $renderedfiles{$p};
327 title => $config{wikiname},
332 return $template->output;
# check_overwrite: refuse (fatally) to write $dest if it already exists
# but is not recorded in %renderedfiles as something this wiki rendered
# — unless a full rebuild is in progress.  Prevents a rendered page
# from silently clobbering an unrelated file in destdir.
335 sub check_overwrite ($$) { #{{{
336 # Important security check. Make sure to call this before saving
337 # any files to the source directory.
341 if (! exists $renderedfiles{$src} && -e $dest && ! $config{rebuild}) {
# Name the page(s) whose render target collides, to aid debugging.
342 error("$dest already exists and was rendered from ".
343 join(" ",(grep { $renderedfiles{$_} eq $dest } keys
345 ", before, so not rendering from $src");
# Return the file's modification time (stat field 9).
352 return (stat($file))[9];
# findlinks: scan $content for unescaped wikilinks (negative lookbehind
# skips backslash-escaped ones) and return the pages they point to.
# Here the link regexp is used without linkify()'s escape capture, so
# $2 is the page-name capture (it is $3 in linkify).
355 sub findlinks ($$) { #{{{
360 while ($content =~ /(?<!\\)$config{wiki_link_regexp}/g) {
361 push @links, titlepage($2);
363 # Discussion links are a special case since they're not in the text
364 # of the page, but on its template.
365 return @links, "$page/discussion";
# render: render one source $file into destdir.  Known page types are
# linkified, preprocessed, htmlized and wrapped in the page template;
# unknown types are copied through raw.  Updates the global render
# state (%links, %inlinepages, %oldpagemtime, %renderedfiles).
368 sub render ($) { #{{{
371 my $type=pagetype($file);
372 my $srcfile=srcfile($file);
373 if ($type ne 'unknown') {
374 my $content=readfile($srcfile);
375 my $page=pagename($file);
# Record outgoing links and reset inline state before re-rendering,
# so stale data from a previous render doesn't linger.
377 $links{$page}=[findlinks($content, $page)];
378 delete $inlinepages{$page};
# The rendering pipeline: wikilinks -> directives -> HTML.
380 $content=linkify($content, $page);
381 $content=preprocess($page, $content);
382 $content=htmlize($type, $content);
# Never clobber a destdir file we didn't render ourselves.
384 check_overwrite("$config{destdir}/".htmlpage($page), $page);
385 writefile(htmlpage($page), $config{destdir},
386 genpage($content, $page, mtime($srcfile)));
387 $oldpagemtime{$page}=time;
388 $renderedfiles{$page}=htmlpage($page);
# Non-page files: copy through verbatim (binary-safe readfile/writefile).
391 my $content=readfile($srcfile, 1);
393 check_overwrite("$config{destdir}/$file", $file);
394 writefile($file, $config{destdir}, $content, 1);
395 $oldpagemtime{$file}=time;
396 $renderedfiles{$file}=$file;
# After deleting a file, remove its parent directories for as long as
# they are empty — rmdir fails on a non-empty directory, ending the
# loop naturally.
404 my $dir=dirname($file);
405 while (rmdir($dir)) {
# Generate the hyperestraier search cgi's template and config files
# under wikistatedir, then symlink the estseek.cgi dispatcher next to
# them so the search front end can run against this wiki's index.
# NOTE(review): bareword TEMPLATE filehandle and 2-arg open — safe only
# because the paths are built from trusted config, but worth modernizing.
411 my $estdir="$config{wikistatedir}/hyperestraier";
412 my $cgi=basename($config{cgiurl});
414 open(TEMPLATE, ">$estdir/$cgi.tmpl") ||
415 error("write $estdir/$cgi.tmpl: $!");
# The tmpl wraps estseek's form/result/info placeholders in the wiki's
# standard misc template so search pages match the site look.
416 print TEMPLATE misctemplate("search",
417 "<!--ESTFORM-->\n\n<!--ESTRESULT-->\n\n<!--ESTINFO-->\n\n");
419 open(TEMPLATE, ">$estdir/$cgi.conf") ||
420 error("write $estdir/$cgi.conf: $!");
421 my $template=HTML::Template->new(
422 filename => "$config{templatedir}/estseek.conf"
# estseek wants absolute paths in its config.
424 eval q{use Cwd 'abs_path'};
427 tmplfile => "$estdir/$cgi.tmpl",
428 destdir => abs_path($config{destdir}),
431 print TEMPLATE $template->output;
433 $cgi="$estdir/".basename($config{cgiurl});
435 symlink("/usr/lib/estraier/estseek.cgi", $cgi) ||
436 error("symlink $cgi: $!");
# estcmd: run the hyperestraier "estcmd" utility.  The first argument
# is split into the subcommand and its flags; the index location is
# always appended.  A child process is forked via open(CHILD, "|-"),
# so anything printed to CHILD (presumably the remaining args — those
# lines are elided here) becomes estcmd's stdin.
439 sub estcmd ($;@) { #{{{
440 my @params=split(' ', shift);
441 push @params, "-cl", "$config{wikistatedir}/hyperestraier";
446 my $pid=open(CHILD, "|-");
# Parent: close flushes the pipe and reaps the child; a nonzero exit
# status is fatal.
452 close(CHILD) || error("estcmd @params exited nonzero: $?");
# Child: silence estcmd's stdout, then exec it (list form, no shell).
456 open(STDOUT, "/dev/null"); # shut it up (closing won't work)
457 exec("estcmd", @params) || error("can't run estcmd");
# refresh: the main update pass.  Scans srcdir (and the underlay) for
# page files, works out what was added, removed, or changed, renders
# everything affected — including pages affected only indirectly via
# links, backlinks, or inlining — and finally updates the
# hyperestraier search index.  NOTE(review): many lines of this sub
# are elided in this listing.
461 sub refresh () { #{{{
462 # find existing pages
465 eval q{use File::Find};
# First scan: srcdir.  Prune junk/VCS paths, untaint acceptable
# filenames via wiki_file_regexp, and warn about anything else.
469 if (/$config{wiki_file_prune_regexp}/) {
470 $File::Find::prune=1;
472 elsif (! -d $_ && ! -l $_) {
473 my ($f)=/$config{wiki_file_regexp}/; # untaint
475 warn("skipping bad filename $_\n");
478 $f=~s/^\Q$config{srcdir}\E\/?//;
480 $exists{pagename($f)}=1;
# Second scan: the underlay directory of default pages, with the same
# prune/untaint handling.
488 if (/$config{wiki_file_prune_regexp}/) {
489 $File::Find::prune=1;
491 elsif (! -d $_ && ! -l $_) {
492 my ($f)=/$config{wiki_file_regexp}/; # untaint
494 warn("skipping bad filename $_\n");
497 # Don't add files that are in the
# ... source directory: srcdir files shadow the underlay.
499 $f=~s/^\Q$config{underlaydir}\E\/?//;
500 if (! -e "$config{srcdir}/$f" &&
501 ! -l "$config{srcdir}/$f") {
503 $exists{pagename($f)}=1;
508 }, $config{underlaydir});
512 # check for added or removed pages
514 foreach my $file (@files) {
515 my $page=pagename($file);
# No recorded mtime means the page is new (or was previously deleted).
516 if (! $oldpagemtime{$page}) {
517 debug("new page $page") unless exists $pagectime{$page};
520 $pagesources{$page}=$file;
# Preserve the original creation time across re-adds.
521 $pagectime{$page}=mtime(srcfile($file))
522 unless exists $pagectime{$page};
# Pages we knew about but that no longer exist: remove their output
# and forget their render state (mtime 0 marks them deleted).
526 foreach my $page (keys %oldpagemtime) {
527 if (! $exists{$page}) {
528 debug("removing old page $page");
529 push @del, $pagesources{$page};
530 prune($config{destdir}."/".$renderedfiles{$page});
531 delete $renderedfiles{$page};
532 $oldpagemtime{$page}=0;
533 delete $pagesources{$page};
537 # render any updated files
538 foreach my $file (@files) {
539 my $page=pagename($file);
541 if (! exists $oldpagemtime{$page} ||
542 mtime(srcfile($file)) > $oldpagemtime{$page}) {
543 debug("rendering changed file $file");
549 # if any files were added or removed, check to see if each page
550 # needs an update due to linking to them or inlining them.
551 # TODO: inefficient; pages may get rendered above and again here;
552 # problem is the bestlink may have changed and we won't know until
555 FILE: foreach my $file (@files) {
556 my $page=pagename($file);
557 foreach my $f (@add, @del) {
559 foreach my $link (@{$links{$page}}) {
560 if (bestlink($page, $link) eq $p) {
561 debug("rendering $file, which links to $p");
571 # Handle backlinks; if a page has added/removed links, update the
572 # pages it links to. Also handle inlining here.
573 # TODO: inefficient; pages may get rendered above and again here;
574 # problem is the backlinks could be wrong in the first pass render
576 if (%rendered || @del) {
# Re-render any page that inlines a page which changed or was deleted.
577 foreach my $f (@files) {
579 if (exists $inlinepages{$p}) {
580 foreach my $file (keys %rendered, @del) {
581 my $page=pagename($file);
582 if (globlist_match($page, $inlinepages{$p})) {
583 debug("rendering $f, which inlines $page");
# Diff each rendered/deleted page's links against %oldlinks; any
# target gained or lost needs its backlinks refreshed.
593 foreach my $file (keys %rendered, @del) {
594 my $page=pagename($file);
596 if (exists $links{$page}) {
597 foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
599 ! exists $oldlinks{$page} ||
600 ! grep { $_ eq $link } @{$oldlinks{$page}}) {
601 $linkchanged{$link}=1;
605 if (exists $oldlinks{$page}) {
606 foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
608 ! exists $links{$page} ||
609 ! grep { $_ eq $link } @{$links{$page}}) {
610 $linkchanged{$link}=1;
615 foreach my $link (keys %linkchanged) {
616 my $linkfile=$pagesources{$link};
617 if (defined $linkfile) {
618 debug("rendering $linkfile, to update its backlinks");
620 $rendered{$linkfile}=1;
# Finally, feed everything rendered this pass to the search indexer.
625 if ($config{hyperestraier} && (%rendered || @del)) {
626 debug("updating hyperestraier search index");
628 estcmd("gather -cm -bc -cl -sd",
629 map { $config{destdir}."/".$renderedfiles{pagename($_)} }
636 debug("generating hyperestraier cgi config");