From: Joey Hess Date: Sat, 17 Apr 2010 16:50:23 +0000 (-0400) Subject: Merge remote branch 'davrieb/autotag' into autotag X-Git-Tag: 3.20100427~62^2~28 X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/commitdiff_plain/54f600af14bf6dc067ffc30ec6f22d517001fe99?hp=-c Merge remote branch 'davrieb/autotag' into autotag Conflicts: IkiWiki.pm IkiWiki/Plugin/tag.pm --- 54f600af14bf6dc067ffc30ec6f22d517001fe99 diff --combined IkiWiki.pm index b37b1f344,966a3bbc6..2f26a16ce --- a/IkiWiki.pm +++ b/IkiWiki.pm @@@ -7,24 -7,24 +7,25 @@@ use strict use Encode; use HTML::Entities; use URI::Escape q{uri_escape_utf8}; -use POSIX; +use POSIX (); use Storable; use open qw{:utf8 :std}; use vars qw{%config %links %oldlinks %pagemtime %pagectime %pagecase %pagestate %wikistate %renderedfiles %oldrenderedfiles %pagesources %destsources %depends %depends_simple %hooks - %forcerebuild %loaded_plugins %typedlinks %oldtypedlinks}; - %forcerebuild %loaded_plugins %autofiles %del_hash}; ++ %forcerebuild %loaded_plugins %typedlinks %oldtypedlinks ++ %autofiles %del_hash}; use Exporter q{import}; our @EXPORT = qw(hook debug error template htmlpage deptype add_depends pagespec_match pagespec_match_list bestlink htmllink readfile writefile pagetype srcfile pagename - displaytime will_render gettext urlto targetpage + displaytime will_render gettext ngettext urlto targetpage add_underlay pagetitle titlepage linkpage newpagefile - inject add_link + inject add_link add_autofile %config %links %pagestate %wikistate %renderedfiles - %pagesources %destsources); + %pagesources %destsources %typedlinks); our $VERSION = 3.00; # plugin interface version, next is ikiwiki version our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE our $installdir='/usr'; # INSTALLDIR_AUTOREPLACE done by Makefile, DNE @@@ -37,7 -37,6 +38,7 @@@ our $DEPEND_LINKS=4 # Optimisation. use Memoize; memoize("abs2rel"); +memoize("sortspec_translate"); memoize("pagespec_translate"); memoize("template_file"); @@@ -335,20 -334,11 +336,20 @@@ sub getsetup () safe => 0, # paranoia rebuild => 0, }, + include => { + type => "string", + default => undef, + example => '^\.htaccess$', + description => "regexp of normally excluded files to include", + advanced => 1, + safe => 0, # regexp + rebuild => 1, + }, exclude => { type => "string", default => undef, - example => '\.wav$', - description => "regexp of source files to ignore", + example => '^(*\.private|Makefile)$', + description => "regexp of files that should be skipped", advanced => 1, safe => 0, # regexp rebuild => 1, @@@ -419,13 -409,6 +420,13 @@@ safe => 0, rebuild => 0, }, + clean => { + type => "internal", + default => 0, + description => "running in clean mode", + safe => 0, + rebuild => 0, + }, refresh => { type => "internal", default => 0, @@@ -440,9 -423,10 +441,9 @@@ safe => 0, rebuild => 0, }, - getctime => { + gettime => { type => "internal", - default => 0, - description => "running in getctime mode", + description => "running in gettime mode", safe => 0, rebuild => 0, }, @@@ -467,13 -451,6 +468,13 @@@ safe => 0, rebuild => 0, }, + setuptype => { + type => "internal", + default => "Standard", + description => "perl class to use to dump setup file", + safe => 0, + rebuild => 0, + }, allow_symlinks_before_srcdir => { type => "boolean", default => 0, @@@ -964,12 -941,7 +965,12 @@@ sub linkpage ($) sub cgiurl (@) { my %params=@_; - return $config{cgiurl}."?". 
+ my $cgiurl=$config{cgiurl}; + if (exists $params{cgiurl}) { + $cgiurl=$params{cgiurl}; + delete $params{cgiurl}; + } + return $cgiurl."?". join("&", map $_."=".uri_escape_utf8($params{$_}), keys %params); } @@@ -1118,11 -1090,6 +1119,11 @@@ sub htmllink ($$$;@) return "$linktext"; } +sub userpage ($) { + my $user=shift; + return length $config{userdir} ? "$config{userdir}/$user" : $user; +} + sub openiduser ($) { my $user=shift; @@@ -1131,10 -1098,11 +1132,10 @@@ my $display; if (Net::OpenID::VerifiedIdentity->can("DisplayOfURL")) { - # this works in at least 2.x $display = Net::OpenID::VerifiedIdentity::DisplayOfURL($user); } else { - # this only works in 1.x + # backcompat with old version my $oid=Net::OpenID::VerifiedIdentity->new(identity => $user); $display=$oid->display; } @@@ -1147,7 -1115,7 +1148,7 @@@ # Convert "http://somehost.com/user" to "user [somehost.com]". # (also "https://somehost.com/user/") if ($display !~ /\[/) { - $display=~s/^https?:\/\/(.+)\/([^\/]+)\/?$/$2 [$1]/; + $display=~s/^https?:\/\/(.+)\/([^\/#?]+)\/?(?:[#?].*)?$/$2 [$1]/; } $display=~s!^https?://!!; # make sure this is removed eval q{use CGI 'escapeHTML'}; @@@ -1157,6 -1125,23 +1158,6 @@@ return; } -sub userlink ($) { - my $user=shift; - - my $oiduser=eval { openiduser($user) }; - if (defined $oiduser) { - return "$oiduser"; - } - else { - eval q{use CGI 'escapeHTML'}; - error($@) if $@; - - return htmllink("", "", escapeHTML( - length $config{userdir} ? $config{userdir}."/".$user : $user - ), noimageinline => 1); - } -} - sub htmlize ($$$$) { my $page=shift; my $destpage=shift; @@@ -1164,7 -1149,7 +1165,7 @@@ my $content=shift; my $oneline = $content !~ /\n/; - + if (exists $hooks{htmlize}{$type}) { $content=$hooks{htmlize}{$type}{call}->( page => $page, @@@ -1185,9 -1170,10 +1186,9 @@@ if ($oneline) { # hack to get rid of enclosing junk added by markdown - # and other htmlizers + # and other htmlizers/sanitizers $content=~s/^
<p>
//i; - $content=~s/<\/p>$//i; - chomp $content; + $content=~s/<\/p>\n*$//i; } return $content; @@@ -1242,7 -1228,7 +1243,7 @@@ sub preprocess ($$$;$$) (?: """(.*?)""" # 2: triple-quoted value | - "([^"]+)" # 3: single-quoted value + "([^"]*?)" # 3: single-quoted value | (\S+) # 4: unquoted value ) @@@ -1328,7 -1314,7 +1329,7 @@@ (?: """.*?""" # triple-quoted value | - "[^"]+" # single-quoted value + "[^"]*?" # single-quoted value | [^"\s\]]+ # unquoted value ) @@@ -1351,7 -1337,7 +1352,7 @@@ (?: """.*?""" # triple-quoted value | - "[^"]+" # single-quoted value + "[^"]*?" # single-quoted value | [^"\s\]]+ # unquoted value ) @@@ -1502,7 -1488,7 +1503,7 @@@ sub loadindex () if (! $config{rebuild}) { %pagesources=%pagemtime=%oldlinks=%links=%depends= %destsources=%renderedfiles=%pagecase=%pagestate= - %depends_simple=(); + %depends_simple=%typedlinks=%oldtypedlinks=(); } my $in; if (! open ($in, "<", "$config{wikistatedir}/indexdb")) { @@@ -1511,7 -1497,6 +1512,7 @@@ open ($in, "<", "$config{wikistatedir}/indexdb") || return; } else { + $config{gettime}=1; # first build return; } } @@@ -1569,14 -1554,6 +1570,14 @@@ if (exists $d->{state}) { $pagestate{$page}=$d->{state}; } + if (exists $d->{typedlinks}) { + $typedlinks{$page}=$d->{typedlinks}; + + while (my ($type, $links) = each %{$typedlinks{$page}}) { + next unless %$links; + $oldtypedlinks{$page}{$type} = {%$links}; + } + } } $oldrenderedfiles{$page}=[@{$d->{dest}}]; } @@@ -1625,10 -1602,6 +1626,10 @@@ sub saveindex () $index{page}{$src}{depends_simple} = $depends_simple{$page}; } + if (exists $typedlinks{$page} && %{$typedlinks{$page}}) { + $index{page}{$src}{typedlinks} = $typedlinks{$page}; + } + if (exists $pagestate{$page}) { foreach my $id (@hookids) { foreach my $key (keys %{$pagestate{$page}{$id}}) { @@@ -1790,10 -1763,6 +1791,10 @@@ sub rcs_getctime ($) $hooks{rcs}{rcs_getctime}{call}->(@_); } +sub rcs_getmtime ($) { + $hooks{rcs}{rcs_getmtime}{call}->(@_); +} + sub rcs_receive () { $hooks{rcs}{rcs_receive}{call}->(); } @@@ -1812,7 -1781,7 +1813,7 @@@ sub add_depends ($$;$) # Add explicit dependencies for influences. my $sub=pagespec_translate($pagespec); - return if $@; + return unless defined $sub; foreach my $p (keys %pagesources) { my $r=$sub->($p, location => $page); my $i=$r->influences; @@@ -1842,7 -1811,6 +1843,7 @@@ sub deptype (@) return $deptype; } +my $file_prune_regexp; sub file_pruned ($;$) { my $file=shift; if (@_) { @@@ -1853,52 -1821,34 +1854,52 @@@ $file =~ s#^\Q$base\E/+##; } - my $regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')'; - return $file =~ m/$regexp/; + if (defined $config{include} && length $config{include}) { + return 0 if $file =~ m/$config{include}/; + } + + if (! defined $file_prune_regexp) { + $file_prune_regexp='('.join('|', @{$config{wiki_file_prune_regexps}}).')'; + $file_prune_regexp=qr/$file_prune_regexp/; + } + return $file =~ m/$file_prune_regexp/; } sub define_gettext () { # If translation is needed, redefine the gettext function to do it. # Otherwise, it becomes a quick no-op. 
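#
# A minimal sketch of the calling convention this sets up (the strings
# and count below are made up for illustration):
#
#   gettext("discussion");                  # translated if LANG/LC_* is set
#   ngettext("%d page", "%d pages", $n);    # picks singular or plural form
#
# With no locale environment both are cheap pass-throughs: gettext()
# returns its argument, and ngettext() returns $_[0] or $_[1] depending
# on whether $_[2] == 1. With a locale set, both defer to Locale::gettext,
# which is only loaded on the first call.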
- no warnings 'redefine'; + my $gettext_obj; + my $getobj; if ((exists $ENV{LANG} && length $ENV{LANG}) || (exists $ENV{LC_ALL} && length $ENV{LC_ALL}) || (exists $ENV{LC_MESSAGES} && length $ENV{LC_MESSAGES})) { - *gettext=sub { - my $gettext_obj=eval q{ + $getobj=sub { + $gettext_obj=eval q{ use Locale::gettext q{textdomain}; Locale::gettext->domain('ikiwiki') }; - - if ($gettext_obj) { - $gettext_obj->get(shift); - } - else { - return shift; - } }; } - else { - *gettext=sub { return shift }; - } + + no warnings 'redefine'; + *gettext=sub { + $getobj->() if $getobj; + if ($gettext_obj) { + $gettext_obj->get(shift); + } + else { + return shift; + } + }; + *ngettext=sub { + $getobj->() if $getobj; + if ($gettext_obj) { + $gettext_obj->nget(@_); + } + else { + return ($_[2] == 1 ? $_[0] : $_[1]) + } + }; } sub gettext { @@@ -1906,11 -1856,6 +1907,11 @@@ gettext(@_); } +sub ngettext { + define_gettext(); + ngettext(@_); +} + sub yesno ($) { my $val=shift; @@@ -1942,84 -1887,38 +1943,108 @@@ sub inject use warnings; } -sub add_link ($$) { +sub add_link ($$;$) { my $page=shift; my $link=shift; + my $type=shift; push @{$links{$page}}, $link unless grep { $_ eq $link } @{$links{$page}}; + + if (defined $type) { + $typedlinks{$page}{$type}{$link} = 1; + } +} + +sub sortspec_translate ($$) { + my $spec = shift; + my $reverse = shift; + + my $code = ""; + my @data; + while ($spec =~ m{ + \s* + (-?) # group 1: perhaps negated + \s* + ( # group 2: a word + \w+\([^\)]*\) # command(params) + | + [^\s]+ # or anything else + ) + \s* + }gx) { + my $negated = $1; + my $word = $2; + my $params = undef; + + if ($word =~ m/^(\w+)\((.*)\)$/) { + # command with parameters + $params = $2; + $word = $1; + } + elsif ($word !~ m/^\w+$/) { + error(sprintf(gettext("invalid sort type %s"), $word)); + } + + if (length $code) { + $code .= " || "; + } + + if ($negated) { + $code .= "-"; + } + + if (exists $IkiWiki::SortSpec::{"cmp_$word"}) { + if (defined $params) { + push @data, $params; + $code .= "IkiWiki::SortSpec::cmp_$word(\$data[$#data])"; + } + else { + $code .= "IkiWiki::SortSpec::cmp_$word(undef)"; + } + } + else { + error(sprintf(gettext("unknown sort type %s"), $word)); + } + } + + if (! length $code) { + # undefined sorting method... sort arbitrarily + return sub { 0 }; + } + + if ($reverse) { + $code="-($code)"; + } + + no warnings; + return eval 'sub { '.$code.' }'; } + sub add_autofile ($$) { + my $autofile=shift; + my $plugin=shift; + + if (srcfile($autofile, 1)) { + return 0; + } + + my ($file, $page) = verify_src_file("$config{srcdir}/$autofile", $config{srcdir}); + + if ((!defined $file) || + (exists $pagestate{$page}{$plugin}{autofile_deleted})) { + return 0; + } + + if (exists $del_hash{$file}) { + $pagestate{$page}{$plugin}{autofile_deleted}=1; + return 0; + } + + $autofiles{$file}=$plugin; + return 1; + } + sub pagespec_translate ($) { my $spec=shift; @@@ -2087,7 -1986,7 +2112,7 @@@ sub pagespec_match ($$;@) my $sub=pagespec_translate($spec); return IkiWiki::ErrorReason->new("syntax error in pagespec \"$spec\"") - if $@ || ! defined $sub; + if ! defined $sub; return $sub->($page, @params); } @@@ -2105,9 -2004,7 +2130,9 @@@ sub pagespec_match_list ($$;@) my $sub=pagespec_translate($pagespec); error "syntax error in pagespec \"$pagespec\"" - if $@ || ! defined $sub; + if ! defined $sub; + my $sort=sortspec_translate($params{sort}, $params{reverse}) + if defined $params{sort}; my @candidates; if (exists $params{list}) { @@@ -2120,19 -2017,39 +2145,19 @@@ ? grep { ! 
$params{filter}->($_) } keys %pagesources : keys %pagesources; } - - if (defined $params{sort}) { - my $f; - if ($params{sort} eq 'title') { - $f=sub { pagetitle(basename($a)) cmp pagetitle(basename($b)) }; - } - elsif ($params{sort} eq 'title_natural') { - eval q{use Sort::Naturally}; - if ($@) { - error(gettext("Sort::Naturally needed for title_natural sort")); - } - $f=sub { Sort::Naturally::ncmp(pagetitle(basename($a)), pagetitle(basename($b))) }; - } - elsif ($params{sort} eq 'mtime') { - $f=sub { $pagemtime{$b} <=> $pagemtime{$a} }; - } - elsif ($params{sort} eq 'age') { - $f=sub { $pagectime{$b} <=> $pagectime{$a} }; - } - else { - error sprintf(gettext("unknown sort type %s"), $params{sort}); - } - @candidates = sort { &$f } @candidates; - } - - @candidates=reverse(@candidates) if $params{reverse}; - - $depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT); # clear params, remainder is passed to pagespec + $depends{$page}{$pagespec} |= ($params{deptype} || $DEPEND_CONTENT); my $num=$params{num}; delete @params{qw{num deptype reverse sort filter list}}; + # when only the top matches will be returned, it's efficient to + # sort before matching to pagespec, + if (defined $num && defined $sort) { + @candidates=IkiWiki::SortSpec::sort_pages( + $sort, @candidates); + } + my @matches; my $firstfail; my $count=0; @@@ -2154,21 -2071,14 +2179,21 @@@ $depends_simple{$page}{lc $k} |= $i->{$k}; } - return @matches; + # when all matches will be returned, it's efficient to + # sort after matching + if (! defined $num && defined $sort) { + return IkiWiki::SortSpec::sort_pages( + $sort, @matches); + } + else { + return @matches; + } } sub pagespec_valid ($) { my $spec=shift; - my $sub=pagespec_translate($spec); - return ! $@; + return defined pagespec_translate($spec); } sub glob2re ($) { @@@ -2288,34 -2198,26 +2313,34 @@@ sub match_link ($$;@) $link=derel($link, $params{location}); my $from=exists $params{location} ? 
$params{location} : ''; + my $linktype=$params{linktype}; + my $qualifier=''; + if (defined $linktype) { + $qualifier=" with type $linktype"; + } my $links = $IkiWiki::links{$page}; - return IkiWiki::FailReason->new("$page has no links", "" => 1) + return IkiWiki::FailReason->new("$page has no links", $page => $IkiWiki::DEPEND_LINKS, "" => 1) unless $links && @{$links}; my $bestlink = IkiWiki::bestlink($from, $link); foreach my $p (@{$links}) { if (length $bestlink) { - return IkiWiki::SuccessReason->new("$page links to $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1) - if $bestlink eq IkiWiki::bestlink($page, $p); + if ((!defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p}) && $bestlink eq IkiWiki::bestlink($page, $p)) { + return IkiWiki::SuccessReason->new("$page links to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1) + } } else { - return IkiWiki::SuccessReason->new("$page links to page $p matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1) - if match_glob($p, $link, %params); + if ((!defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p}) && match_glob($p, $link, %params)) { + return IkiWiki::SuccessReason->new("$page links to page $p$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1) + } my ($p_rel)=$p=~/^\/?(.*)/; $link=~s/^\///; - return IkiWiki::SuccessReason->new("$page links to page $p_rel matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1) - if match_glob($p_rel, $link, %params); + if ((!defined $linktype || exists $IkiWiki::typedlinks{$page}{$linktype}{$p_rel}) && match_glob($p_rel, $link, %params)) { + return IkiWiki::SuccessReason->new("$page links to page $p_rel$qualifier, matching $link", $page => $IkiWiki::DEPEND_LINKS, "" => 1) + } } } - return IkiWiki::FailReason->new("$page does not link to $link", "" => 1); + return IkiWiki::FailReason->new("$page does not link to $link$qualifier", $page => $IkiWiki::DEPEND_LINKS, "" => 1); } sub match_backlink ($$;@) { @@@ -2396,13 -2298,11 +2421,13 @@@ sub match_user ($$;@) my $user=shift; my %params=@_; + my $regexp=IkiWiki::glob2re($user); + if (! exists $params{user}) { return IkiWiki::ErrorReason->new("no user specified"); } - if (defined $params{user} && lc $params{user} eq lc $user) { + if (defined $params{user} && $params{user}=~/^$regexp$/i) { return IkiWiki::SuccessReason->new("user is $user"); } elsif (! defined $params{user}) { @@@ -2450,22 -2350,4 +2475,22 @@@ sub match_ip ($$;@) } } +package IkiWiki::SortSpec; + +# This is in the SortSpec namespace so that the $a and $b that sort() uses +# are easily available in this namespace, for cmp functions to use them. 
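#
# A hedged sketch of how a plugin could extend this: any cmp_* sub defined
# in this package becomes a usable sort type, so a hypothetical
#
#   sub cmp_length { length $a <=> length $b }
#
# (not part of this commit) would let pagespec_match_list() callers pass
# sort => "length", or sort => "-length title" to combine it with the
# built-in cmp_title via the " || " chaining that sortspec_translate()
# generates.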
+sub sort_pages { + my $f=shift; + sort $f @_ +} + +sub cmp_title { + IkiWiki::pagetitle(IkiWiki::basename($a)) + cmp + IkiWiki::pagetitle(IkiWiki::basename($b)) +} + +sub cmp_mtime { $IkiWiki::pagemtime{$b} <=> $IkiWiki::pagemtime{$a} } +sub cmp_age { $IkiWiki::pagectime{$b} <=> $IkiWiki::pagectime{$a} } + 1 diff --combined IkiWiki/Plugin/tag.pm index 7a85874f6,fdd63d637..9e6f417bf --- a/IkiWiki/Plugin/tag.pm +++ b/IkiWiki/Plugin/tag.pm @@@ -6,6 -6,8 +6,6 @@@ use warnings use strict; use IkiWiki 3.00; -my %tags; - sub import { hook(type => "getopt", id => "tag", call => \&getopt); hook(type => "getsetup", id => "tag", call => \&getsetup); @@@ -34,6 -36,13 +34,13 @@@ sub getsetup () safe => 1, rebuild => 1, }, + tag_autocreate => { + type => "boolean", + example => 0, + description => "Autocreate new tag pages", + safe => 1, + rebuild => 1, + }, } sub tagpage ($) { @@@ -57,6 -66,22 +64,22 @@@ sub taglink ($$$;@) return htmllink($page, $destpage, tagpage($tag), %opts); } + sub gentag ($) { + my $tag=shift; + if (defined $config{tag_autocreate} && $config{tag_autocreate}) { + my $tagfile = newpagefile(tagpage($tag), $config{default_pageext}); + $tagfile=~s/^\///; + + return if (! add_autofile($tagfile, "tag")); + + debug(sprintf(gettext("creating tag page %s"), $tag)); + + my $template=template("autotag.tmpl"); + $template->param(tag => $tag); + writefile($tagfile, $config{srcdir}, $template->output); + } + } + sub preprocess_tag (@) { if (! @_) { return ""; @@@ -69,8 -94,13 +92,12 @@@ foreach my $tag (keys %params) { $tag=linkpage($tag); - $tags{$page}{$tag}=1; - ++ + # hidden WikiLink + add_link($page, tagpage($tag), 'tag'); ++ + # add tagpage if necessary + gentag($tag); - - # hidden WikiLink - add_link($page, tagpage($tag)); } return ""; @@@ -84,13 -114,15 +111,13 @@@ sub preprocess_taglink (@) return join(" ", map { if (/(.*)\|(.*)/) { my $tag=linkpage($2); - $tags{$params{page}}{$tag}=1; - add_link($params{page}, tagpage($tag)); + add_link($params{page}, tagpage($tag), 'tag'); return taglink($params{page}, $params{destpage}, $tag, linktext => pagetitle($1)); } else { my $tag=linkpage($_); - $tags{$params{page}}{$tag}=1; - add_link($params{page}, tagpage($tag)); + add_link($params{page}, tagpage($tag), 'tag'); return taglink($params{page}, $params{destpage}, $tag); } } @@@ -105,19 -137,17 +132,19 @@@ sub pagetemplate (@) my $destpage=$params{destpage}; my $template=$params{template}; + my $tags = $typedlinks{$page}{tag}; + $template->param(tags => [ map { link => taglink($page, $destpage, $_, rel => "tag") - }, sort keys %{$tags{$page}} - ]) if exists $tags{$page} && %{$tags{$page}} && $template->query(name => "tags"); + }, sort keys %$tags + ]) if defined $tags && %$tags && $template->query(name => "tags"); if ($template->query(name => "categories")) { # It's an rss/atom template. Add any categories. 
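#
# For reference, a rough sketch of the data shape the rewritten code
# below reads (page and tag names are hypothetical): after
# add_link("posts/foo", "/tags/unix", "tag") in preprocess_tag(),
#
#   $typedlinks{"posts/foo"}{tag} = { "/tags/unix" => 1 };
#
# i.e. the plugin's old private %tags hash is replaced by the "tag" slot
# of the global %typedlinks.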
- if (exists $tags{$page} && %{$tags{$page}}) { + if (defined $tags && %$tags) { $template->param(categories => [map { category => $_ }, - sort keys %{$tags{$page}}]); + sort keys %$tags]); } } } @@@ -125,7 -155,9 +152,7 @@@ package IkiWiki::PageSpec; sub match_tagged ($$;@) { - my $page = shift; - my $glob = shift; - return match_link($page, IkiWiki::Plugin::tag::tagpage($glob)); + return match_link($_[0], IkiWiki::Plugin::tag::tagpage($_[1]), linktype => 'tag'); } 1 diff --combined IkiWiki/Render.pm index a6b0f0617,0c21455fb..796af6af2 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm @@@ -167,7 -167,6 +167,7 @@@ sub scan ($) else { $links{$page}=[]; } + delete $typedlinks{$page}; run_hooks(scan => sub { shift->( @@@ -281,6 -280,27 +281,27 @@@ sub srcdir_check () } + sub verify_src_file ($$) { + my $file=decode_utf8(shift); + my $dir=shift; + + return if -l $file || -d _; + $file=~s/^\Q$dir\E\/?//; + return if ! length $file; + my $page = pagename($file); + if (! exists $pagesources{$page} && + file_pruned($file)) { + $File::Find::prune=1; + return; + } + + my ($file_untainted) = $file =~ /$config{wiki_file_regexp}/; # untaint + if (! defined $file_untainted) { + warn(sprintf(gettext("skipping bad filename %s"), $file)."\n"); + } + return ($file_untainted, $page); + } + sub find_src_files () { my @files; my %pages; @@@ -289,22 -309,9 +310,9 @@@ find({ no_chdir => 1, wanted => sub { - my $file=decode_utf8($_); - $file=~s/^\Q$config{srcdir}\E\/?//; - return if -l $_ || -d _ || ! length $file; - my $page = pagename($file); - if (! exists $pagesources{$page} && - file_pruned($file)) { - $File::Find::prune=1; - return; - } - - my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint - if (! defined $f) { - warn(sprintf(gettext("skipping bad filename %s"), $file)."\n"); - } - else { - push @files, $f; + my ($file, $page) = verify_src_file($_, $config{srcdir}); + if (defined $file) { + push @files, $file; if ($pages{$page}) { debug(sprintf(gettext("%s has multiple possible source pages"), $page)); } @@@ -316,27 -323,14 +324,14 @@@ find({ no_chdir => 1, wanted => sub { - my $file=decode_utf8($_); - $file=~s/^\Q$dir\E\/?//; - return if -l $_ || -d _ || ! length $file; - my $page=pagename($file); - if (! exists $pagesources{$page} && - file_pruned($file)) { - $File::Find::prune=1; - return; - } - - my ($f) = $file =~ /$config{wiki_file_regexp}/; # untaint - if (! defined $f) { - warn(sprintf(gettext("skipping bad filename %s"), $file)."\n"); - } - else { + my ($file, $page) = verify_src_file($_, $dir); + if (defined $file) { # avoid underlaydir override # attacks; see security.mdwn - if (! -l "$config{srcdir}/$f" && + if (! -l "$config{srcdir}/$file" && ! -e _) { if (! $pages{$page}) { - push @files, $f; + push @files, $file; $pages{$page}=1; } } @@@ -352,8 -346,6 +347,8 @@@ sub find_new_files ($) my @new; my @internal_new; + my $times_noted; + foreach my $file (@$files) { my $page=pagename($file); if (exists $pagesources{$page} && $pagesources{$page} ne $file) { @@@ -365,33 -357,16 +360,33 @@@ if (isinternal($page)) { push @internal_new, $file; } - else { + elsif ($config{rcs}) { + if (! 
$times_noted) { + debug(sprintf(gettext("querying %s for file creation and modification times.."), $config{rcs})); + $times_noted=1; + } + push @new, $file; - if ($config{getctime} && -e "$config{srcdir}/$file") { + if ($config{gettime} && -e "$config{srcdir}/$file") { eval { - my $time=rcs_getctime("$config{srcdir}/$file"); - $pagectime{$page}=$time; + my $ctime=rcs_getctime("$config{srcdir}/$file"); + if ($ctime > 0) { + $pagectime{$page}=$ctime; + } }; if ($@) { print STDERR $@; } + my $mtime; + eval { + $mtime=rcs_getmtime("$config{srcdir}/$file"); + }; + if ($@) { + print STDERR $@; + } + elsif ($mtime > 0) { + utime($mtime, $mtime, "$config{srcdir}/$file"); + } } } $pagecase{lc $page}=$page; @@@ -418,7 -393,6 +413,7 @@@ sub find_del_files ($) push @del, $pagesources{$page}; } $links{$page}=[]; + delete $typedlinks{$page}; $renderedfiles{$page}=[]; $pagemtime{$page}=0; } @@@ -520,29 -494,6 +515,29 @@@ sub remove_unrendered () } } +sub link_types_changed ($$) { + # each is of the form { type => { link => 1 } } + my $new = shift; + my $old = shift; + + return 0 if !defined $new && !defined $old; + return 1 if !defined $new || !defined $old; + + while (my ($type, $links) = each %$new) { + foreach my $link (keys %$links) { + return 1 unless exists $old->{$type}{$link}; + } + } + + while (my ($type, $links) = each %$old) { + foreach my $link (keys %$links) { + return 1 unless exists $new->{$type}{$link}; + } + } + + return 0; +} + sub calculate_changed_links ($$$) { my ($changed, $del, $oldlink_targets)=@_; @@@ -569,14 -520,6 +564,14 @@@ } $linkchangers{lc($page)}=1; } + + # we currently assume that changing the type of a link doesn't + # change backlinks + if (!exists $linkchangers{lc($page)}) { + if (link_types_changed($typedlinks{$page}, $oldtypedlinks{$page})) { + $linkchangers{lc($page)}=1; + } + } } return \%backlinkchanged, \%linkchangers; @@@ -617,7 -560,7 +612,7 @@@ sub render_dependent ($$$$$$$) if (exists $depends{$p} && ! defined $reason) { foreach my $dep (keys %{$depends{$p}}) { my $sub=pagespec_translate($dep); - next if $@ || ! defined $sub; + next unless defined $sub; # only consider internal files # if the page explicitly depends @@@ -694,11 -637,28 +689,28 @@@ sub refresh () my ($changed, $internal_changed)=find_changed($files); run_hooks(needsbuild => sub { shift->($changed) }); my $oldlink_targets=calculate_old_links($changed, $del); + %del_hash = map { $_ => 1 } @{$del}; foreach my $file (@$changed) { scan($file); } + while (my $autofile = shift @{[keys %autofiles]}) { + my $plugin=$autofiles{$autofile}; + my $page=pagename($autofile); + if ($pages->{$page}) { + debug(sprintf(gettext("%s has multiple possible source pages"), $page)); + } + $pages->{$page}=1; + + push @{$files}, $autofile; + push @{$new}, $autofile if find_new_files([$autofile]); + push @{$changed}, $autofile if find_changed([$autofile]); + + scan($autofile); + delete $autofiles{$autofile}; + } + calculate_links(); remove_del(@$del, @$internal_del); @@@ -735,17 -695,6 +747,17 @@@ } } +sub clean_rendered { + lockwiki(); + loadindex(); + remove_unrendered(); + foreach my $page (keys %oldrenderedfiles) { + foreach my $file (@{$oldrenderedfiles{$page}}) { + prune($config{destdir}."/".$file); + } + } +} + sub commandline_render () { lockwiki(); loadindex();
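Taken together, the two sides of this merge implement tag autocreation on
top of the new typed links. A minimal sketch of the flow, assuming a setup
with tagbase => "tags", tag_autocreate => 1, the default mdwn page
extension, and a hypothetical page posts/foo.mdwn containing:

    [[!tag unix]]

preprocess_tag() records the link with add_link("posts/foo", "/tags/unix",
"tag"), and gentag("unix") registers tags/unix.mdwn with
add_autofile("tags/unix.mdwn", "tag") and writes it out from the
autotag.tmpl template. The %autofiles loop in refresh() then treats the
generated file as new/changed and scans it like any other source page.
add_autofile() refuses to act (returns 0) if the source file already
exists, or if the page was deleted earlier: %del_hash and the
autofile_deleted pagestate flag keep a deleted tag page from being
endlessly recreated.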