use warnings;
use strict;
use Encode;
+use HTML::Entities;
use open qw{:utf8 :std};
# Optimisation.
return $page."style.css";
} #}}}
-sub abs2rel ($$) {
+sub abs2rel ($$) { #{{{
# Work around very inefficient behavior in File::Spec if abs2rel
# is passed two relative paths. It's much faster if paths are
# absolute!
my $ret=File::Spec->abs2rel($path, $base);
$ret=~s/^// if defined $ret;
return $ret;
-}
+} #}}}
sub htmllink ($$$;$$$) { #{{{
my $lpage=shift; # the page doing the linking
$items{link}=[];
foreach my $i (split(/ /, $_)) {
my ($item, $val)=split(/=/, $i, 2);
- push @{$items{$item}}, $val;
+ push @{$items{$item}}, decode_entities($val);
}
next unless exists $items{src}; # skip bad lines for now
$oldpagemtime{$page}=$items{mtime}[0];
$oldlinks{$page}=[@{$items{link}}];
$links{$page}=[@{$items{link}}];
- $depends{$page}=join(" ", @{$items{depends}})
- if exists $items{depends};
+ $depends{$page}=$items{depends}[0] if exists $items{depends};
$renderedfiles{$page}=$items{dest}[0];
}
$pagectime{$page}=$items{ctime}[0];
"dest=$renderedfiles{$page}";
$line.=" link=$_" foreach @{$links{$page}};
if (exists $depends{$page}) {
- $line.=" depends=$_" foreach split " ", $depends{$page};
+ $line.=" depends=".encode_entities($depends{$page}, " \t\n");
}
print OUT $line."\n";
}
return $template->output;
}#}}}
-sub glob_match ($$) { #{{{
- my $page=shift;
- my $glob=shift;
-
- if ($glob =~ /^link\((.+)\)$/) {
- my $rev = $links{$page} or return undef;
- foreach my $p (@$rev) {
- return 1 if lc $p eq $1;
- }
- return 0;
- } elsif ($glob =~ /^backlink\((.+)\)$/) {
- my $rev = $links{$1} or return undef;
- foreach my $p (@$rev) {
- return 1 if lc $p eq $page;
- }
- return 0;
- } else {
- # turn glob into safe regexp
- $glob=quotemeta($glob);
- $glob=~s/\\\*/.*/g;
- $glob=~s/\\\?/./g;
- $glob=~s!\\/!/!g;
-
- return $page=~/^$glob$/i;
- }
-} #}}}
-
-sub globlist_match ($$) { #{{{
- my $page=shift;
- my @globlist=split(" ", shift);
-
- # check any negated globs first
- foreach my $glob (@globlist) {
- return 0 if $glob=~/^!(.*)/ && glob_match($page, $1);
- }
-
- foreach my $glob (@globlist) {
- return 1 if glob_match($page, $glob);
- }
-
- return 0;
-} #}}}
-
sub hook (@) { # {{{
my %param=@_;
}
} #}}}
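+# Convert an old-style GlobList into an equivalent PageSpec expression;
+# for example, "* !SandBox" becomes "!SandBox and (*)".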
+sub globlist_to_pagespec ($) { #{{{
+ my @globlist=split(' ', shift);
+
+ my (@spec, @skip);
+ foreach my $glob (@globlist) {
+ if ($glob=~/^!(.*)/) {
+ push @skip, $glob;
+ }
+ else {
+ push @spec, $glob;
+ }
+ }
+
+ my $spec=join(" or ", @spec);
+ if (@skip) {
+ my $skip=join(" and ", @skip);
+ if (length $spec) {
+ $spec="$skip and ($spec)";
+ }
+ else {
+ $spec=$skip;
+ }
+ }
+ return $spec;
+} #}}}
+
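+# Decide whether a string is an old-style GlobList: it is if it contains
+# more than one whitespace-separated word and the second word is not a
+# PageSpec operator ("and" or "or").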
+sub is_globlist ($) { #{{{
+ my $s=shift;
+ $s=~/[^\s]+\s+([^\s]+)/ && $1 ne "and" && $1 ne "or";
+} #}}}
+
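+# Quote a string for inclusion in generated perl code. Curly braces are
+# stripped so the string cannot escape the q{} quoting.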
+sub safequote ($) { #{{{
+ my $s=shift;
+ $s=~s/[{}]//g;
+ return "q{$s}";
+} #}}}
+
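+# Merge two PageSpecs into one that matches pages matching either of them.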
+sub pagespec_merge ($$) { #{{{
+ my $a=shift;
+ my $b=shift;
+
+ # Support for old-style GlobLists.
+ if (is_globlist($a)) {
+ $a=globlist_to_pagespec($a);
+ }
+ if (is_globlist($b)) {
+ $b=globlist_to_pagespec($b);
+ }
+
+ return "($a) or ($b)";
+} #}}}
+
+sub pagespec_match ($$) { #{{{
+ my $page=shift;
+ my $spec=shift;
+
+ # Support for old-style GlobLists.
+ if (is_globlist($spec)) {
+ $spec=globlist_to_pagespec($spec);
+ }
+
+ # Convert spec to perl code.
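+	# For example, "blog/* and !*/Discussion" is translated to roughly:
+	#   match_glob($page, q{blog/*}) && ! match_glob($page, q{*/Discussion})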
+ my $code="";
+ while ($spec=~m/\s*(\!|\(|\)|\w+\([^\)]+\)|[^\s()]+)\s*/ig) {
+ my $word=$1;
+ if (lc $word eq "and") {
+ $code.=" &&";
+ }
+ elsif (lc $word eq "or") {
+ $code.=" ||";
+ }
+ elsif ($word eq "(" || $word eq ")" || $word eq "!") {
+ $code.=" ".$word;
+ }
+ elsif ($word =~ /^(link|backlink|creation_month|creation_year|creation_day)\((.+)\)$/) {
+ $code.=" match_$1(\$page, ".safequote($2).")";
+ }
+ else {
+ $code.=" match_glob(\$page, ".safequote($word).")";
+ }
+ }
+
+ return eval $code;
+} #}}}
+
+sub match_glob ($$) { #{{{
+ my $page=shift;
+ my $glob=shift;
+
+ # turn glob into safe regexp
+ $glob=quotemeta($glob);
+ $glob=~s/\\\*/.*/g;
+ $glob=~s/\\\?/./g;
+
+ return $page=~/^$glob$/;
+} #}}}
+
+sub match_link ($$) { #{{{
+ my $page=shift;
+ my $link=shift;
+
+ my $links = $links{$page} or return undef;
+ foreach my $p (@$links) {
+ return 1 if lc $p eq $link;
+ }
+ return 0;
+} #}}}
+
+sub match_backlink ($$) { #{{{
+ my $page=shift;
+ my $linkto=shift;
+
+ my $links = $links{$linkto} or return undef;
+ foreach my $p (@$links) {
+ return 1 if lc $p eq $page;
+ }
+ return 0;
+} #}}}
+
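+# The creation_* matches compare against fields of localtime() for the
+# page's creation time: [3] is the day of the month, [4] the month
+# (0-based), and [5] the year minus 1900.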
+sub match_creation_day ($$) { #{{{
+	return ((localtime($pagectime{shift()}))[3] == shift);
+} #}}}
+
+sub match_creation_month ($$) { #{{{
+	return ((localtime($pagectime{shift()}))[4] + 1 == shift);
+} #}}}
+
+sub match_creation_year ($$) { #{{{
+	return ((localtime($pagectime{shift()}))[5] + 1900 == shift);
+} #}}}
+
+
1
foreach my $admin (@{$config{adminuser}}) {
my $locked_pages=userinfo_get($admin, "locked_pages");
- if (globlist_match($page, userinfo_get($admin, "locked_pages"))) {
+ if (pagespec_match($page, userinfo_get($admin, "locked_pages"))) {
return 1 if $nonfatal;
error(htmllink("", "", $page, 1)." is locked by ".
htmllink("", "", $admin, 1)." and cannot be edited.");
$form->field(name => "password", type => "password");
$form->field(name => "confirm_password", type => "password");
$form->field(name => "subscriptions", size => 50,
- comment => "(".htmllink("", "", "GlobList", 1).")");
+ comment => "(".htmllink("", "", "PageSpec", 1).")");
$form->field(name => "locked_pages", size => 50,
- comment => "(".htmllink("", "", "GlobList", 1).")");
+ comment => "(".htmllink("", "", "PageSpec", 1).")");
if (! is_admin($user_name)) {
$form->field(name => "locked_pages", type => "hidden");
} #}}}
sub loadstate () { #{{{
- eval q{use HTML::Entities};
- die $@ if $@;
if (-e "$IkiWiki::config{wikistatedir}/aggregate") {
open (IN, "$IkiWiki::config{wikistatedir}/aggregate") ||
	die "$IkiWiki::config{wikistatedir}/aggregate: $!";
my @broken;
foreach my $page (keys %IkiWiki::links) {
- if (IkiWiki::globlist_match($page, $params{pages})) {
+ if (IkiWiki::pagespec_match($page, $params{pages})) {
foreach my $link (@{$IkiWiki::links{$page}}) {
next if $link =~ /.*\/discussion/i && $IkiWiki::config{discussion};
my $bestlink=IkiWiki::bestlink($page, $link);
my @list;
foreach my $page (keys %pagesources) {
next if $page eq $params{page};
- if (globlist_match($page, $params{pages})) {
+ if (pagespec_match($page, $params{pages})) {
push @list, $page;
}
}
my @orphans;
foreach my $page (keys %IkiWiki::renderedfiles) {
next if $linkedto{$page};
- next unless IkiWiki::globlist_match($page, $params{pages});
+ next unless IkiWiki::pagespec_match($page, $params{pages});
# If the page has a link to some other page, it's
# indirectly linked to a page via that page's backlinks.
next if grep {
return $#pages+1 if $params{pages} eq "*"; # optimisation
my $count=0;
foreach my $page (@pages) {
- $count++ if IkiWiki::globlist_match($page, $params{pages});
+ $count++ if IkiWiki::pagespec_match($page, $params{pages});
}
return $count;
} # }}}
my %counts;
my $max = 0;
foreach my $page (keys %IkiWiki::links) {
- if (IkiWiki::globlist_match($page, $params{pages})) {
+ if (IkiWiki::pagespec_match($page, $params{pages})) {
my @bl = IkiWiki::backlinks($page);
$counts{$page} = scalar(@bl);
$max = $counts{$page} if $counts{$page} > $max;
sub add_depends ($$) { #{{{
my $page=shift;
- my $globlist=shift;
+ my $pagespec=shift;
if (! exists $depends{$page}) {
- $depends{$page}=$globlist;
+ $depends{$page}=$pagespec;
}
else {
- $depends{$page}=globlist_merge($depends{$page}, $globlist);
+ $depends{$page}=pagespec_merge($depends{$page}, $pagespec);
}
} # }}}
-sub globlist_merge ($$) { #{{{
- my $a=shift;
- my $b=shift;
-
- my $ret="";
- # Only add negated globs if they are not matched by the other globlist.
- foreach my $i ((map { [ $a, $_ ] } split(" ", $b)),
- (map { [ $b, $_ ] } split(" ", $a))) {
- if ($i->[1]=~/^!(.*)/) {
- if (! globlist_match($1, $i->[0])) {
- $ret.=" ".$i->[1];
- }
- }
- else {
- $ret.=" ".$i->[1];
- }
- }
-
- return $ret;
-} #}}}
-
sub genpage ($$$) { #{{{
my $page=shift;
my $content=shift;
foreach my $file (keys %rendered, @del) {
next if $f eq $file;
my $page=pagename($file);
- if (globlist_match($page, $depends{$p})) {
+ if (pagespec_match($page, $depends{$p})) {
debug("rendering $f, which depends on $page");
render($f);
$rendered{$f}=1;
length $userinfo->{$user}->{subscriptions} &&
exists $userinfo->{$user}->{email} &&
length $userinfo->{$user}->{email} &&
- grep { globlist_match($_, $userinfo->{$user}->{subscriptions}) } @pages) {
+ grep { pagespec_match($_, $userinfo->{$user}->{subscriptions}) } @pages) {
push @ret, $userinfo->{$user}->{email};
}
}
You can turn any page on this wiki into a weblog by inserting a
[[PreProcessorDirective]]. Like this:
- \\[[inline pages="blog/* !*/Discussion" show="10" rootpage="blog"]]
+ \\[[inline pages="blog/* and !*/Discussion" show="10" rootpage="blog"]]
-Any pages that match the specified [[GlobList]] (in the example, any
+Any pages that match the specified [[PageSpec]] (in the example, any
[[SubPage]] of "blog") will be part of the blog, and the newest 10
of them will appear in the page.
If you want your blog to have an archive page listing every post ever made
to it, you can accomplish that like this:
- \\[[inline pages="blog/* !*/Discussion" archive="yes"]]
+ \\[[inline pages="blog/* and !*/Discussion" archive="yes"]]
You can even create an automatically generated list of all the pages on the
wiki, with the most recently added at the top, like this:
- \\[[inline pages="* !*/Discussion" archive="yes"]]
+ \\[[inline pages="* and !*/Discussion" archive="yes"]]
If you want to be able to add pages to a given blog feed by tagging them,
you can do that too. To tag a page, just make it link to a page or pages
-that represent its tags. Then use the special link() [[GlobList]] to match
+that represent its tags. Then use the special link() [[PageSpec]] to match
all pages that have a given tag:
\\[[inline pages="link(life)"]]
Or include some tags and exclude others:
- \\[[inline pages="link(debian) !link(social)"]]
+ \\[[inline pages="link(debian) and !link(social)"]]
+++ /dev/null
-When the wiki stores lists of pages, such as pages that are locked or pages
-whose commit emails you want subscribe to, it uses a GlobList.
-
-This is a list of page names, separated by white space. The "glob" bit is
-that as well as full page names, it can contain glob patterns. "`*`" stands
-in for any part of the page name, and "`?`" for any single letter of its
-name. So if you wanted to list all the pages about tea, and any
-[[SubPage]]s of the SandBox, but not including the SandBox itself:
-
- *tea* SandBox/*
-
-You can also prefix an item in the list with "`!`" to skip matching any
-pages that match it. So if you want to specify all pages except for
-Discussion pages and the SandBox:
-
- * !SandBox !*/Discussion
-
-It's also possible to match pages that link to a given page, by writing
-"link(page)" in a globlist. Or, match pages that a given page links to, by
-writing "backlink(page)".
--- /dev/null
+To select a set of pages, such as pages that are locked, pages
+whose commit emails you want to subscribe to, or pages to combine into a
+[[blog]], the wiki uses a PageSpec. This is an expression that matches
+a set of pages.
+
+The simplest PageSpec is a simple list of pages. For example, this matches
+any of the three listed pages:
+
+ foo or bar or baz
+
+More often you will want to match any pages that have a particular thing in
+their name. You can do this using a glob pattern. "`*`" stands for any part
+of a page name, and "`?`" for any single letter of a page name. So this
+matches all pages about music, and any [[SubPage]]s of the SandBox, but does
+not match the SandBox itself:
+
+ *music* or SandBox/*
+
+You can also prefix an item with "`!`" to skip pages that match it. So to
+match all pages except for Discussion pages and the SandBox:
+
+ * and !SandBox and !*/Discussion
+
+It's also possible to match pages that link to a given page, by writing
+"`link(page)`". Or, match pages that a given page links to, by
+writing "`backlink(page)`". Or match pages created in a given month, year,
+or day of the month by writing "`creation_month(month)`",
+"`creation_year(year)`" or "`creation_day(mday)`".
+
+For example, to match all pages in a blog that link to the page about music
+and were created in January 2005:
+
+	blog/* and link(music) and creation_year(2005) and creation_month(1)
+
+By combining these creation matches, you can limit matching to pages
+created during a particular period.
+
+More complex expressions can also be created, by using parentheses for
+grouping. For example, to match pages in a blog that are tagged with either
+of two tags, use:
+
+ blog/* and (link(tag/foo) or link(tag/bar))
+
+## Old syntax
+
+The old PageSpec syntax was called a "GlobList", and worked differently in
+two ways:
+
+1. "and" and "or" were not used; any page matching any item from the list
+ matched.
+2. If an item was prefixed with "`!`", then no page matching that item
+ matched, even if it matched an earlier list item.
+
+For example, here is the old way to match all pages except for the SandBox
+and Discussion pages:
+
+ * !SandBox !*/Discussion
+
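+In the new syntax, the same set of pages would be written as:
+
+	* and !SandBox and !*/Discussion
+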
+The old syntax is still supported, but it is deprecated and will be removed
+at some point, so switching to the new syntax is recommended.
+ikiwiki (1.13) unstable; urgency=low
+
  The GlobList format, which was used for specifying sets of pages, has been
  replaced with a new PageSpec format. While GlobLists will continue to work,
  that format is deprecated; you are encouraged to use PageSpecs from now on,
  and to convert any GlobLists in your wiki to PageSpecs.
+
+ See the new PageSpec page for details.
+
+ You will need to rebuild your wiki when upgrading to this version. If you
+ listed your wiki in /etc/ikiwiki/wikilist this will be done automatically
  when the Debian package is upgraded. Or use ikiwiki-mass-rebuild to force a
+ rebuild.
+
+ -- Joey Hess <joeyh@debian.org> Tue, 1 Aug 2006 18:29:51 -0400
+
ikiwiki (1.11) unstable; urgency=low
Some changes to tags in this release, due to a new tag plugin. If you have
index. (Aka Please Please Please, let that be the last one.)
* Patch from Roland Mas to support an rss=no parameter to inline directives.
Closes: #380743
+ * Renamed GlobLists to PageSpecs.
+ * PageSpecs can now include nested parens, "and", and "or". This remains
+   backwards compatible with the old GlobList format. It's implemented by
+   treating the PageSpec as a very limited microlanguage that is transformed
+   to perl code that does the matching.
+ * The old GlobList format is deprecated, and I encourage users to switch to
+   using the new PageSpec format. Compatibility with the old format will be
+   removed at some point, possibly by 2.0.
+ * Wiki rebuild needed on upgrade to this version due to PageSpec change.
+ * Add support for creation_month and creation_year to PageSpec.
+ Closes: #380680
+ * Changes to index file encoding.
-- Joey Hess <joeyh@debian.org> Tue, 1 Aug 2006 16:00:58 -0400
# Change this when some incompatible change is made that requires
# rebuilding all wikis.
-firstcompat=1.4
+firstcompat=1.13
if [ "$1" = configure ] && \
dpkg --compare-versions "$2" lt "$firstcompat"; then
This is ikiwiki's bug list. Link bugs to [[bugs/done]] when done.
-[[inline pages="bugs/* !bugs/done !link(bugs/done) !*/Discussion" rootpage="bugs" show="30"]]
+[[inline pages="bugs/* and !bugs/done and !link(bugs/done) and !*/Discussion" rootpage="bugs" show="30"]]
----
# Full list of open bugs:
-[[inline pages="bugs/* !bugs/done !link(bugs/done) !*/Discussion" archive="yes" rss="no"]]
+[[inline pages="bugs/* and !bugs/done and !link(bugs/done) and !*/Discussion" archive="yes" rss="no"]]
recently fixed [[bugs]]
-[[inline pages="link(bugs/done) !bugs !*/Discussion" show="10"]]
+[[inline pages="link(bugs/done) and !bugs and !*/Discussion" show="10"]]
and the page is removed (such as by this page being linked to bugs/done),
the inlining page is not updated to remove it.
-This only happens if the page is removed from the inlined globlist due to
+This only happens if the page is removed from the inlined pagespec due to
a tag changing; the problem is that once the tag is changed, ikiwiki does
not know that the page used to match before.
To fix, seems I would need to record the actual list of pages that are
currently included on an inline page, and do a comparison to see if any
have changed. At first I thought, why not just add them to the dependencies
-explicitly, but that fails because the dependencies globlist fails to match
+explicitly, but that fails because the dependencies pagespec fails to match
when a negated expression like "!tag(bugs/done)" is matched.
So, quick fixes aside, what's the generic mechanism here that a plugin can
use to let ikiwiki know that a page should be updated if some other page
-stops matching its dependencies globlist?
+stops matching its dependencies pagespec?
* [[blogging|blog]]
You can turn any page in the wiki into a [[blog]]. Pages matching a
- specified [[GlobList]] will be displayed as a weblog within the blog
+ specified [[PageSpec]] will be displayed as a weblog within the blog
page. And an RSS feed can be generated to follow the blog.
Ikiwiki's own [[TODO]], [[news]], and [[plugins]] pages are good examples
The easiest way to install ikiwiki is using the Debian package.
-Ikiwiki requires [[MarkDown]] be installed, and also uses the following
-perl modules if available: `CGI::Session` `CGI::FormBuilder` (version
-3.02.02 or newer) `HTML::Template` `Mail::SendMail` `Time::Duration`
-`Date::Parse` (libtimedate-perl), `HTML::Scrubber`, `RPC::XML`,
-`XML::Simple`, `XML::Feed`, `HTML::Parser`
+Ikiwiki requires [[MarkDown]] and the `HTML::Parser` perl module be
+installed, and also uses the following perl modules if available:
+`CGI::Session`, `CGI::FormBuilder` (version 3.02.02 or newer),
+`HTML::Template`, `Mail::Sendmail`, `Time::Duration`, `Date::Parse`,
+`HTML::Scrubber`, `RPC::XML`, `XML::Simple`, `XML::Feed`.
If you want to install from the tarball, you should make sure that the
required perl modules are installed, then run:
posted. [[IkiWikiUsers]] are recommended to subscribe to this page's RSS
feed.
-[[inline pages="news/* !*/Discussion" rootpage="news" show="30"]]
+[[inline pages="news/* and !*/Discussion" rootpage="news" show="30"]]
By the way, some other pages with RSS feeds about ikiwiki include
[[plugins]], [[TODO]] and [[bugs]].
Joey to open up anonymous svn access to this wiki so you can check in the
patches directly).
-[[inline pages="patchqueue/* !patchqueue/done !link(patchqueue/done) !*/Discussion" rootpage="patchqueue" show="30" archive="yes"]]
-
+[[inline pages="patchqueue/* and !patchqueue/done and !link(patchqueue/done) and !*/Discussion" rootpage="patchqueue" show="30" archive="yes"]]
a useful way to find pages that still need to be written, or links that
are written wrong.
-The optional parameter "pages" can be a [[GlobList]] specifying the pages
+The optional parameter "pages" can be a [[PageSpec]] specifying the pages
to search for broken links; the default is to search them all.
This plugin is included in ikiwiki, but is not enabled by default.
This plugin generates a list of possibly orphaned pages -- pages that no other page
links to.
-The optional parameter "pages" can be a [[GlobList]] specifying the pages
+The optional parameter "pages" can be a [[PageSpec]] specifying the pages
to check for orphans; the default is to check them all.
Note that it takes [[BackLinks]] into account, but does not count inlining a
Provides a \\[[pagecount ]] [[PreProcessorDirective]] that is replaced with
the total number of pages currently in the wiki.
-The optional parameter "pages" can be a [[GlobList]] specifying the pages
+The optional parameter "pages" can be a [[PageSpec]] specifying the pages
to count; the default is to count them all.
This plugin is included in ikiwiki, but is not enabled by default.
\[[tag tech life linux]]
The tags work the same as if you had put a (hidden) [[WikiLink]] on the page
-for each tag, so you can use a [[GlobList]] to link to all pages that are
+for each tag, so you can use a [[PageSpec]] to link to all pages that are
tagged with a given tag, for example. The tags will also show up on blog
entries and at the bottom of the tagged pages, as well as in rss feeds.
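
For example, a directive like `\[[inline pages="link(life)"]]` will include
all pages tagged with "life".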
* `%IkiWiki::renderedfiles` contains the name of the file rendered by a
page
* `%IkiWiki::pagesources` contains the name of the source file for a page.
-* `%IkiWiki::depends` contains a [[GlobList]] that is used to specify other
+* `%IkiWiki::depends` contains a [[PageSpec]] that is used to specify other
pages that a page depends on. If one of its dependencies is updated, the
page will also get rebuilt.
Many plugins will need to add dependencies to this hash; the best way to do
it is by using the IkiWiki::add_depends function, which takes as its
- parameters the page name and a [[GlobList]] of dependencies to add.
+ parameters the page name and a [[PageSpec]] of dependencies to add.
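+  For example (an illustrative call, not part of this patch), a plugin's
+  preprocess function might call
+  `IkiWiki::add_depends($params{page}, $params{pages})` so the page is
+  rebuilt whenever a page matching the PageSpec changes.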
* `%IkiWiki::forcerebuild` any pages set as the keys to this hash will be
treated as if they're modified and rebuilt.
# 2.0
* Unit test suite (with tests of at least core stuff like
- [[GlobList]]). (status: exists, could of course use more tests)
+ [[PageSpec]]). (status: exists, could of course use more tests)
* [[Plugins]] _(status: done, interface still not quite stable)_
* [[Tags]] _(status: partial)_
* Should have fully working [[todo/utf8]] support. _(status: fair)_
Welcome to ikiwiki's todo list. Link items to [[todo/done]] when done.
-[[inline pages="todo/* !todo/done !link(todo/done) !*/Discussion" rootpage="todo" show="30"]]
+[[inline pages="todo/* and !todo/done and !link(todo/done) and !*/Discussion" rootpage="todo" show="30"]]
----
# Full list of open items:
-[[inline pages="todo/* !todo/done !link(todo/done) !*/Discussion" archive="yes" rss="no"]]
+[[inline pages="todo/* and !todo/done and !link(todo/done) and !*/Discussion" archive="yes" rss="no"]]
recently fixed [[TODO]] items
-[[inline pages="link(todo/done) !todo !*/Discussion" show="10"]]
+[[inline pages="link(todo/done) and !todo and !*/Discussion" show="10"]]
-Need to improve [[globlist]]s, adding more powerful boolean expressions.
+Need to improve globlists, adding more powerful boolean expressions.
The current behavior is to check for negated expressions, and not match if
there are any, then check for normal expressions and match if any match,
This fails if you want to do something like match only pages with tag foo
that are under directory bar. I think we need parens for grouping, and
probably also boolean OR.
+
+[[todo/done]]
> file for some pages; blog pages would use a template different from the
> home page, even if both are managed in the same repository, etc.
-Well, that would probably be fairly easy to add if it used globlists to
+Well, that would probably be fairly easy to add if it used pagespecs to
specify which pages use the non-default template.
Hmm, I think the pagetemplate hook should allow one to get close enough to
+++ /dev/null
-#!/usr/bin/perl
-use warnings;
-use strict;
-use Test::More tests => 13;
-
-BEGIN { use_ok("IkiWiki"); }
-ok(IkiWiki::globlist_match("foo", "foo bar"), "simple list");
-ok(IkiWiki::globlist_match("bar", "foo bar"), "simple list 2");
-ok(IkiWiki::globlist_match("foo", "*"));
-ok(IkiWiki::globlist_match("foo", "f?? !foz"));
-ok(! IkiWiki::globlist_match("foo", "f?? !foo"));
-ok(! IkiWiki::globlist_match("foo", "* !foo"));
-ok(! IkiWiki::globlist_match("foo", "foo !foo"));
-ok(IkiWiki::globlist_match("page", "?ag?"));
-ok(! IkiWiki::globlist_match("page", "?a?g?"));
-ok(! IkiWiki::globlist_match("foo.png", "* !*.*"));
-ok(IkiWiki::globlist_match("foo.png", "*.*"));
-ok(! IkiWiki::globlist_match("foo", "*.*"));
+++ /dev/null
-#!/usr/bin/perl
-use warnings;
-use strict;
-use Test::More tests => 25;
-
-sub same {
- my $a=shift;
- my $b=shift;
- my $match=shift;
-
- my $imatch=(IkiWiki::globlist_match($match, $a) ||
- IkiWiki::globlist_match($match, $b));
- my $cmatch=IkiWiki::globlist_match($match, IkiWiki::globlist_merge($a, $b));
-
- return $imatch == $cmatch;
-}
-
-BEGIN { use_ok("IkiWiki::Render"); }
-
-ok(same("foo", "bar", "foo"), "basic match 1");
-ok(same("foo", "bar", "bar"), "basic match 2");
-ok(same("foo", "bar", "foobar"), "basic failed match");
-ok(same("foo", "!bar", "foo"), "basic match with inversion");
-ok(same("foo", "!bar", "bar"), "basic failed match with inversion");
-ok(same("!foo", "bar", "foo"), "basic failed match with inversion 2");
-ok(same("!foo", "bar", "bar"), "basic match with inversion 2");
-ok(same("!foo", "!bar", "foo"), "double inversion failed match");
-ok(same("!foo", "!bar", "bar"), "double inversion failed match 2");
-ok(same("*", "!bar", "foo"), "glob+inversion match");
-ok(same("*", "!bar", "bar"), "matching glob and matching inversion");
-ok(same("* !foo", "!bar", "bar"), "matching glob and matching inversion");
-ok(same("* !foo", "!bar", "foo"), "matching glob with matching inversion and non-matching inversion");
-ok(same("* !foo", "!foo", "foo"), "matching glob with matching inversion and matching inversion");
-ok(same("b??", "!b??", "bar"), "matching glob and matching inverted glob");
-ok(same("f?? !f??", "!bar", "bar"), "matching glob and matching inverted glob");
-ok(same("b??", "!b?z", "bar"), "matching glob and non-matching inverted glob");
-ok(same("f?? !f?z", "!bar", "bar"), "matching glob and non-matching inverted glob");
-ok(same("!foo bar baz", "!bar", "bar"), "matching list and matching inversion");
-ok(IkiWiki::globlist_match("foo/Discussion",
- IkiWiki::globlist_merge("* !*/Discussion", "*/Discussion")), "should match");
-ok(same("* !*/Discussion", "*/Discussion", "foo/Discussion"), "Discussion merge 1");
-ok(same("*/Discussion", "* !*/Discussion", "foo/Discussion"), "Discussion merge 2");
-ok(same("*/Discussion !*/bar", "*/bar !*/Discussion", "foo/Discussion"), "bidirectional merge 1");
-ok(same("*/Discussion !*/bar", "*/bar !*/Discussion", "foo/bar"), "bidirectional merge 2");
--- /dev/null
+#!/usr/bin/perl
+use warnings;
+use strict;
+use Test::More tests => 20;
+
+BEGIN { use_ok("IkiWiki"); }
+
+ok(IkiWiki::pagespec_match("foo", "*"));
+ok(IkiWiki::pagespec_match("page", "?ag?"));
+ok(! IkiWiki::pagespec_match("page", "?a?g?"));
+ok(IkiWiki::pagespec_match("foo.png", "*.*"));
+ok(! IkiWiki::pagespec_match("foo", "*.*"));
+ok(IkiWiki::pagespec_match("foo", "foo or bar"), "simple list");
+ok(IkiWiki::pagespec_match("bar", "foo or bar"), "simple list 2");
+ok(IkiWiki::pagespec_match("foo", "f?? and !foz"));
+ok(! IkiWiki::pagespec_match("foo", "f?? and !foo"));
+ok(! IkiWiki::pagespec_match("foo", "* and !foo"));
+ok(! IkiWiki::pagespec_match("foo", "foo and !foo"));
+ok(! IkiWiki::pagespec_match("foo.png", "* and !*.*"));
+
+# old style globlists
+ok(IkiWiki::pagespec_match("foo", "foo bar"), "simple list");
+ok(IkiWiki::pagespec_match("bar", "foo bar"), "simple list 2");
+ok(IkiWiki::pagespec_match("foo", "f?? !foz"));
+ok(! IkiWiki::pagespec_match("foo", "f?? !foo"));
+ok(! IkiWiki::pagespec_match("foo", "* !foo"));
+ok(! IkiWiki::pagespec_match("foo", "foo !foo"));
+ok(! IkiWiki::pagespec_match("foo.png", "* !*.*"));
--- /dev/null
+#!/usr/bin/perl
+use warnings;
+use strict;
+use Test::More tests => 25;
+
+sub same {
+ my $a=shift;
+ my $b=shift;
+ my $match=shift;
+
+ my $imatch=(IkiWiki::pagespec_match($match, $a) ||
+ IkiWiki::pagespec_match($match, $b));
+ my $cmatch=IkiWiki::pagespec_match($match, IkiWiki::pagespec_merge($a, $b));
+
+ return $imatch == $cmatch;
+}
+
+BEGIN { use_ok("IkiWiki"); }
+
+ok(same("foo", "bar", "foo"), "basic match 1");
+ok(same("foo", "bar", "bar"), "basic match 2");
+ok(same("foo", "bar", "foobar"), "basic failed match");
+ok(same("foo", "!bar", "foo"), "basic match with inversion");
+ok(same("foo", "!bar", "bar"), "basic failed match with inversion");
+ok(same("!foo", "bar", "foo"), "basic failed match with inversion 2");
+ok(same("!foo", "bar", "bar"), "basic match with inversion 2");
+ok(same("!foo", "!bar", "foo"), "double inversion failed match");
+ok(same("!foo", "!bar", "bar"), "double inversion failed match 2");
+ok(same("*", "!bar", "foo"), "glob+inversion match");
+ok(same("*", "!bar", "bar"), "matching glob and matching inversion");
+ok(same("* !foo", "!bar", "bar"), "matching glob and matching inversion");
+ok(same("* !foo", "!bar", "foo"), "matching glob with matching inversion and non-matching inversion");
+ok(same("* !foo", "!foo", "foo"), "matching glob with matching inversion and matching inversion");
+ok(same("b??", "!b??", "bar"), "matching glob and matching inverted glob");
+ok(same("f?? !f??", "!bar", "bar"), "matching glob and matching inverted glob");
+ok(same("b??", "!b?z", "bar"), "matching glob and non-matching inverted glob");
+ok(same("f?? !f?z", "!bar", "bar"), "matching glob and non-matching inverted glob");
+ok(same("!foo bar baz", "!bar", "bar"), "matching list and matching inversion");
+ok(IkiWiki::pagespec_match("foo/Discussion",
+ IkiWiki::pagespec_merge("* !*/Discussion", "*/Discussion")), "should match");
+ok(same("* !*/Discussion", "*/Discussion", "foo/Discussion"), "Discussion merge 1");
+ok(same("*/Discussion", "* !*/Discussion", "foo/Discussion"), "Discussion merge 2");
+ok(same("*/Discussion !*/bar", "*/bar !*/Discussion", "foo/Discussion"), "bidirectional merge 1");
+ok(same("*/Discussion !*/bar", "*/bar !*/Discussion", "foo/bar"), "bidirectional merge 2");