Instead of having files foo.html "in front of" foo/, I prefer to have
-foo/index.html. This patch allows that. Specifically, foo/index.type
-is translated to $links{'foo/'}, and bestlink looks for either "foo" or
-"foo/" when linking to pages. There are other miscellaneous changes that
-go with that:
-
-1. change the `cgi_editpage` `@page_locs` code so that creating foo from
- a/b/c prefers a/b/foo and then a/b/c/foo, but if creating foo from a/b/c/,
- then prefer a/b/c/foo. I'm not really sure why the original was doing what
- it did (why trim terminal `/` if no pages end in `/`?), so this part might
- break something.
-2. tweak things so that index.rss and index.atom are generated if inlining
- from 'foo/'
-3. backlinks from "foo/bar" to "foo/" trim common prefixes as long as there
-   would be something left when the trimming is done (i.e. don't trim "foo/")
-4. parentlinks for "foo/" are the same as for "foo", except one directory
-   higher
-5. rewrite parentlinks so that bestlink is called at each level
-6. basename("foo/") => basename("foo")
-7. links to "foo/" are translated to "foo/index.html" rather than "foo/.html".
-   (Links to "foo/" might be preferred, but that causes an infinite loop in
-   writefile, because apparently dirname("foo/") == "foo/" on my system for
-   reasons that aren't clear to me.)
-8. pagetitle("foo/") => pagetitle("foo")
-9. clip the final slash when matching a relative pagespec, even if there are
-   no characters after it (otherwise inlining "./a" from "foo/" gets
-   translated to "foo//a")
-
-In case whitespace gets garbled, I'm also leaving a copy of the patch on
-[my site](http://ikidev.betacantrips.com/patches/index.patch). It should apply
-cleanly to a freshly unpacked ikiwiki-1.42. You can also see it in action
-[here](http://ikidev.betacantrips.com/one/). --Ethan
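-
-In short, the two helpers at the heart of this change behave as follows under
-the patch (illustrative calls only; the actual changes are in the IkiWiki.pm
-hunks below):
-
-    htmlpage("foo/");        # "foo/index.html" rather than "foo/.html"
-    bestlink("a/b", "foo");  # can now return "foo/" when only $links{"foo/"} exists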
-
- diff -urX ignorepats ikiclean/IkiWiki/CGI.pm ikidev/IkiWiki/CGI.pm
- --- ikiclean/IkiWiki/CGI.pm 2007-02-11 21:40:32.419641000 -0800
- +++ ikidev/IkiWiki/CGI.pm 2007-02-11 21:54:36.252357000 -0800
- @@ -408,8 +408,8 @@
- @page_locs=$best_loc=$page;
- }
- else {
- - my $dir=$from."/";
- - $dir=~s![^/]+/+$!!;
- + my $dir=$from;
- + $dir=~s![^/]+$!!;
-
- if ((defined $form->field('subpage') && length $form->field('subpage')) ||
- $page eq gettext('discussion')) {
- @@ -420,7 +420,9 @@
- }
-
- push @page_locs, $dir.$page;
- - push @page_locs, "$from/$page";
- + if ($dir ne $from){ # i.e. $from not a directory
- + push @page_locs, "$from/$page";
- + }
- while (length $dir) {
- $dir=~s![^/]+/+$!!;
- push @page_locs, $dir.$page;
- diff -urX ignorepats ikiclean/IkiWiki/Plugin/inline.pm ikidev/IkiWiki/Plugin/inline.pm
- --- ikiclean/IkiWiki/Plugin/inline.pm 2007-02-11 21:40:31.996007000 -0800
- +++ ikidev/IkiWiki/Plugin/inline.pm 2007-02-11 21:54:36.008358000 -0800
- @@ -110,8 +110,8 @@
-
- add_depends($params{page}, $params{pages});
-
- - my $rssurl=rsspage(basename($params{page}));
- - my $atomurl=atompage(basename($params{page}));
- + my $rssurl=basename(rsspage($params{page}));
- + my $atomurl=basename(atompage($params{page}));
- my $ret="";
-
- if (exists $params{rootpage} && $config{cgiurl}) {
- @@ -285,14 +285,18 @@
-
- sub rsspage ($) { #{{{
- my $page=shift;
- + $page = htmlpage($page);
- + $page =~s/\.html$/.rss/;
-
- - return $page.".rss";
- + return $page;
- } #}}}
-
- sub atompage ($) { #{{{
- my $page=shift;
- + $page = htmlpage($page);
- + $page =~s/\.html$/.atom/;
-
- - return $page.".atom";
- + return $page;
- } #}}}
-
- sub genfeed ($$$$@) { #{{{
- diff -urX ignorepats ikiclean/IkiWiki/Render.pm ikidev/IkiWiki/Render.pm
- --- ikiclean/IkiWiki/Render.pm 2007-02-11 21:40:32.413641000 -0800
- +++ ikidev/IkiWiki/Render.pm 2007-02-11 21:54:36.246356000 -0800
- @@ -40,6 +40,7 @@
- my $dir;
- 1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
- defined $dir &&
- + $p_trimmed=~m/^\Q$dir\E(?:.)/ &&
- $p_trimmed=~s/^\Q$dir\E// &&
- $page_trimmed=~s/^\Q$dir\E//;
-
- @@ -57,10 +58,18 @@
- my $path="";
- my $skip=1;
- return if $page eq 'index'; # toplevel
- - foreach my $dir (reverse split("/", $page)) {
- + if ($page =~ m{/$}){
- + $page =~ s{/$}{};
- + $path="../";
- + }
- +
- + while ($page =~ m!([^/]+)$!) {
- + my $last = $1;
- + $page =~ s!/?[^/]+$!!;
- if (! $skip) {
- $path.="../";
- - unshift @ret, { url => $path.htmlpage($dir), page => pagetitle($dir) };
- + my $target = abs2rel(htmlpage(bestlink($page, $last)), $page);
- + unshift @ret, { url => $path.$target, page => pagetitle($last) };
- }
- else {
- $skip=0;
- diff -urX ignorepats ikiclean/IkiWiki.pm ikidev/IkiWiki.pm
- --- ikiclean/IkiWiki.pm 2007-02-11 21:40:35.118406000 -0800
- +++ ikidev/IkiWiki.pm 2007-02-11 22:22:49.146071000 -0800
- @@ -188,6 +188,7 @@
- sub basename ($) { #{{{
- my $file=shift;
-
- + $file=~s!/$!!;
- $file=~s!.*/+!!;
- return $file;
- } #}}}
- @@ -214,12 +215,14 @@
- my $type=pagetype($file);
- my $page=$file;
- $page=~s/\Q.$type\E*$// if defined $type;
- + $page=~s#index$## if $page=~m{/index$};
- return $page;
- } #}}}
-
- sub htmlpage ($) { #{{{
- my $page=shift;
-
- + return $page."index.html" if $page=~m{/$};
- return $page.".html";
- } #}}}
-
- @@ -307,6 +310,7 @@
- my $page=shift;
- my $link=shift;
-
- + $page =~ s!/$!!;
- my $cwd=$page;
- if ($link=~s/^\/+//) {
- # absolute links
- @@ -321,6 +325,9 @@
- if (exists $links{$l}) {
- return $l;
- }
- + if (exists $links{$l.'/'}){
- + return $l.'/';
- + }
- elsif (exists $pagecase{lc $l}) {
- return $pagecase{lc $l};
- }
- @@ -351,6 +358,7 @@
- $page=~s/__(\d+)__/&#$1;/g;
- }
- $page=~y/_/ /;
- + $page=~s!/$!!;
-
- return $page;
- } #}}}
- @@ -879,7 +887,7 @@
-
- # relative matching
- if ($glob =~ m!^\./!) {
- - $from=~s!/?[^/]+$!!;
- + $from=~s!/?[^/]*$!!;
- $glob=~s!^\./!!;
- $glob="$from/$glob" if length $from;
- }
+foo/index.html.
I independently implemented a similar, but smaller patch.
(It's smaller because I only care about rendering; not CGI, for example.)
Also note that a trailing "index" is ignored, i.e. a
page "A/B/index.html" is treated as "A/B".
-> This is actually a pretty cool hack. I'll have to think about
-> whether I like it better than my way though :) --Ethan
+> Actually, your patch is shorter because it's more elegant and better :)
+> I'm withdrawing my old patch, because yours is much more in line with
+> ikiwiki's design and architecture.
+> I would like to make one suggestion to your patch, which is:
+
+ diff -urX ignorepats clean-ikidev/IkiWiki/Plugin/inline.pm ikidev/IkiWiki/Plugin/inline.pm
+ --- clean-ikidev/IkiWiki/Plugin/inline.pm 2007-02-25 12:26:54.099113000 -0800
+ +++ ikidev/IkiWiki/Plugin/inline.pm 2007-02-25 14:55:21.163340000 -0800
+ @@ -154,7 +154,7 @@
+ $link=htmlpage($link) if defined $type;
+ $link=abs2rel($link, dirname($params{destpage}));
+ $template->param(pageurl => $link);
+ - $template->param(title => pagetitle(basename($page)));
+ + $template->param(title => titlename($page));
+ $template->param(ctime => displaytime($pagectime{$page}));
+
+ if ($actions) {
+ @@ -318,7 +318,7 @@
+ my $pcontent = absolute_urls(get_inline_content($p, $page), $url);
+
+ $itemtemplate->param(
+ - title => pagetitle(basename($p), 1),
+ + title => titlename($p, 1),
+ url => $u,
+ permalink => $u,
+ date_822 => date_822($pagectime{$p}),
+ diff -urX ignorepats clean-ikidev/IkiWiki/Render.pm ikidev/IkiWiki/Render.pm
+ --- clean-ikidev/IkiWiki/Render.pm 2007-02-25 12:26:54.745833000 -0800
+ +++ ikidev/IkiWiki/Render.pm 2007-02-25 14:54:01.564715000 -0800
+ @@ -110,7 +110,7 @@
+ $template->param(
+ title => $page eq 'index'
+ ? $config{wikiname}
+ - : pagetitle(basename($page)),
+ + : titlename($page),
+ wikiname => $config{wikiname},
+ parentlinks => [parentlinks($page)],
+ content => $content,
+ diff -urX ignorepats clean-ikidev/IkiWiki.pm ikidev/IkiWiki.pm
+ --- clean-ikidev/IkiWiki.pm 2007-02-25 12:26:58.812850000 -0800
+ +++ ikidev/IkiWiki.pm 2007-02-25 15:05:22.328852000 -0800
+ @@ -192,6 +192,12 @@
+ return $untainted;
+ } #}}}
+
+ +sub titlename($;@) { #{{{
+ + my $page = shift;
+ + $page =~ s!/index$!!;
+ + return pagetitle(basename($page), @_);
+ +} #}}}
+ +
+ sub basename ($) { #{{{
+ my $file=shift;
+
+
+> This way foo/index gets "foo" as its title, not "index". --Ethan
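+
+For example, with that helper (illustrative calls, not part of the patch):
+
+    titlename("foo/index");  # trailing "index" stripped, so the title is "foo"
+    titlename("foo/bar");    # unchanged behaviour, title is "bar"
+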
---
----
First pass over Tuomov's patch -- which doesn't cleanly apply anymore, so
-I'll attach an updated and slightly modified version below.
-
-* `urlto()` is O(N) in the number of pages in the wiki, which leads to
- O(N^2) behavior, which could be a scalability problem. This happens because
- of the lookup for `$to` in `%renderedfiles`, which shouldn't be necessary
- most of the time. Couldn't it just be required that `$to` be a html page
- name on input? Or require it be a non-html page name and always run
- htmlpage on it.
-
- > Perhaps it would be possible to require that, but it seems like a
- > very artificial restriction. The renderedfiles search is just a
- > copy-paste from htmllink, and I'm no perl (or ikiwiki internals)
- > expert... maybe there would be a faster way to do the check whether
- > name translation is needed? No more than O(log n) steps should be
- > needed for a simple search, after all, and maybe there would be shortcuts
- > for even constant-time (in n) checks. --[[tuomov]]
-
- >> Ah, so much easier to critique other people's code than your own.
- >> You're right, this is a general problem, and I can get it to log n
- >> if I really want to. --[[Joey]]
+I'll attach an updated and modified version below. --[[Joey]]
* As we discussed in email, this will break handling of `foo/index.mdwn`
pages. Needs to be changed to generate `foo/index/index.html` for such
>>> rendered as `foo/index.html`. The easiest and cleanest way to fix this is to simply
>>> not handle `index` in such a special manner -- except for the top-level one. --[[tuomov]]
+ >>>> Oh, I see, this patch doesn't address wanting to use foo/index.mdwn as
+ >>>> an input page. Hmm. --Ethan
+
+ >>>>> No, it doesn't. I originally also was after that, but after discussing the
+ >>>>> complexities of supporting that with Joey, came up with this simpler scheme
+ >>>>> without many of those issues. It is the output that I primarily care about, anyway,
+ >>>>> and I do, in fact, find the present input file organisation quite nice. The output
+ >>>>> locations just aren't very good for conversion of an existing site to ikiwiki, and do
+ >>>>> make for rather ugly URLs with the .html extensions. (I do often type some URLs
+ >>>>> from memory, when they're gone from the browser's completion history, and the
+ >>>>> .html makes that more laborious.)
+
+ >>>>>> I support your decision, but now this wiki page serves two different patches :).
+ >>>>>> Can we split them somehow?
+ >>>>>> What are the complexities involved?
+ >>>>>> I think I overcomplicated it a little with my patch, and Per Bothner's gets
+ >>>>>> much closer to the heart of it. --Ethan
+
* This does make the resulting wikis much less browsable directly on the
filesystem, since `dir` to `dir/index.html` conversion is only handled by web
servers and so you end up browsing to a directory index all the time.
>>> unless you also want to serve the same copy over the Web, which I
>>> doubt. --[[tuomov]]
+* I suggest keeping the links using foo/index.html in the html file, but using
+  JavaScript to fix the links at load time - but only if the protocol is http or https.
+ This provides nice links without the "index.html" when served by a
+ web server, but degrades nicely when using a file: url, or when JavaScript
+ is disabled. --Per Bothner
+
+    function fixLinks ( ) {
+      var scheme = location.protocol;
+      // only rewrite links when served over http/https; leave file: URLs alone
+      if (scheme != "http:" && scheme != "https:") return;
+      var links = document.getElementsByTagName("a");
+      for (var i = links.length; --i >= 0; ) {
+        // strip a trailing "/index.html" so the visible URL ends in "/"
+        links[i].href = links[i].href.replace(/\/index\.html$/, "");
+      }
+    }
+    // run it once the page has loaded, as suggested above
+    window.onload = fixLinks;
+
* Some of the generated links are missing the trailing /, which is
  inefficient since it leads to an HTTP redirect when clicking on that
link. Seems to be limited to ".." links, and possibly only to
if ( !length $to ) {
return baseurl($from);
}
-
+
+ >> Indeed, this brings the number of abs2rels closer to par, as well
+ >> as fixing the .. links. --[[Joey]]
* It calls abs2rel about 16% more often with the patch, which makes it
a bit slower, since abs2rel is not very efficient. (This omits abs2rel
>> good time to make such a change. Not required to accept this patch
>> though.
-* `aggregate.pm` uses htmlpage in a way that breaks with its new behavior.
- It will need to be changed as follows:
+ >>> [...] in fact, all uses of htmlpage in the plugins are used to
+ >>> construct an absolute address: the absolute url in most cases, so an `absurl`
+ >>> call could be added to be used instead of htmlpage
+ >>> --[[tuomov]]
-<pre>
-Index: aggregate.pm
-===================================================================
---- aggregate.pm (revision 2700)
-+++ aggregate.pm (working copy)
-@@ -320,7 +320,7 @@
- # NB: This doesn't check for path length limits.
- eval q{use POSIX};
- my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
-- if (defined $max && length(htmlpage($page)) >= $max) {
-+ if (defined $max && length(htmlfn($page)) >= $max) {
- $c="";
- $page=$feed->{dir}."/item";
- while (exists $IkiWiki::pagecase{lc $page.$c} ||
-@@ -356,7 +356,7 @@
- if (ref $feed->{tags}) {
- $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
- }
-- writefile(htmlpage($guid->{page}), $config{srcdir},
-+ writefile(htmlfn($guid->{page}), $config{srcdir},
- $template->output);
-
- # Set the mtime, this lets the build process get the right creation
-@@ -434,4 +434,8 @@
- return "$config{srcdir}/".htmlpage($page);
- } #}}}
-
-+sub htmlfn ($) { #{{{
-+ return shift().".html";
-+} #}}}
-+
- 1
-</pre>
+ >>>> Or it could use urlto("index", $page) instead. --[[Joey]]
-* `linkmap.pm` uses `htmlpage` to construct a link and should probably be
- changed like this (untested):
+ >>>>> That is, however, a relative URL, and maybe an absolute one
+ >>>>> is wanted. Perhaps `urlto($targetpage)` should return the
+ >>>>> absolute version --[[tuomov]]
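+
+(For illustration only -- a hypothetical `absurl`, not part of any patch on
+this page, built from `htmlpage` plus the `beautify_url` helper that tuomov's
+patch below introduces:)
+
+    sub absurl ($) { #{{{
+        my $page=shift;
+        # absolute URL of the rendered page, with the trailing
+        # index.html beautified away
+        return $config{url}."/".beautify_url(htmlpage($page));
+    } #}}}
+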
-<pre>
-Index: linkmap.pm
-===================================================================
---- linkmap.pm (revision 2700)
-+++ linkmap.pm (working copy)
-@@ -50,8 +50,7 @@
- foreach my $item (keys %links) {
- if (pagespec_match($item, $params{pages}, $params{page})) {
- my $link=htmlpage($item);
-- $link=IkiWiki::abs2rel($link, IkiWiki::dirname($params{page}));
-- $mapitems{$item}=$link;
-+ $mapitems{$item}=urlto($link, $params{destpage});
- }
- }
-</pre>
-
-* `inline.pm` uses htmlpage and `abs2rel` to generate a link, and probably
- needs to be changed to either use `urlto` or to call `beautify_url` like
- htmllink does. This might work:
+* > and something else in the
+ > aggregate plugin (above), that I also think isn't what's wanted:
+ > aren't `foo.html` pages also "rendered", so that they get moved as `foo/index.html`?
+ > --[[tuomov]]
-<pre>
-Index: inline.pm
-===================================================================
---- inline.pm (revision 2700)
-+++ inline.pm (working copy)
-@@ -150,10 +150,7 @@
- # Don't use htmllink because this way the
- # title is separate and can be overridden by
- # other plugins.
-- my $link=bestlink($params{page}, $page);
-- $link=htmlpage($link) if defined $type;
-- $link=abs2rel($link, dirname($params{destpage}));
-- $template->param(pageurl => $link);
-+ $template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage}));
- $template->param(title => pagetitle(basename($page)));
- $template->param(ctime => displaytime($pagectime{$page}));
-</pre>
+ >> Yes, the aggregate plugin will save the files as foo.html in the
+ >> sourcedir, and that will result in foo/index.html in the web site, same
+ >> as any other page. --[[Joey]]
* `img.pm` makes some assumptions about the name of the page that will be
linking to the image, which are probably broken.
* The changes to htmlpage's behavior probably call for the plugin
interface version number to be changed.
---[[Joey]]
-
-Updated version of Tuomov's patch follows:
+Latest version of my patch... with most of the stuff that's been discussed, including `targetpage`.
+Also available [here](http://iki.fi/tuomov/use_dirs-20070221.diff). (BTW, this posting, applying, and
+updating of plain-old-diffs containing all the previous changes is starting to be painful. Reminds
+me why I use darcs..) --[[tuomov]]
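+
+(Roughly what the new helpers give with `usedirs` enabled, as far as I can tell
+from the patch -- not tested:)
+
+    targetpage("foo/bar", "html");  # "foo/bar/index.html"
+    targetpage("index", "html");    # "index.html" -- only the toplevel index stays put
+    urlto("foo/bar", "foo");        # "bar/index.html", beautified to "bar/"
+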
<pre>
+Index: IkiWiki.pm
+===================================================================
+--- IkiWiki.pm (revision 2806)
++++ IkiWiki.pm (working copy)
+@@ -14,7 +14,7 @@
+ use Exporter q{import};
+ our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
+ bestlink htmllink readfile writefile pagetype srcfile pagename
+- displaytime will_render gettext
++ displaytime will_render gettext urlto targetpage
+ %config %links %renderedfiles %pagesources);
+ our $VERSION = 1.02; # plugin interface version, next is ikiwiki version
+ our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
+@@ -73,6 +73,7 @@
+ sslcookie => 0,
+ httpauth => 0,
+ userdir => "",
++ usedirs => 0
+ } #}}}
+
+ sub checkconfig () { #{{{
+@@ -224,10 +225,21 @@
+ return $page;
+ } #}}}
+
++sub targetpage ($$) { #{{{
++ my $page=shift;
++ my $ext=shift;
++
++ if (! $config{usedirs} || $page =~ /^index$/ ) {
++ return $page.".".$ext;
++ } else {
++ return $page."/index.".$ext;
++ }
++} #}}}
++
+ sub htmlpage ($) { #{{{
+ my $page=shift;
+-
+- return $page.".html";
++
++ return targetpage($page, "html");
+ } #}}}
+
+ sub srcfile ($) { #{{{
+@@ -393,6 +405,7 @@
+
+ return "$config{url}/" if ! defined $page;
+
++ $page=htmlpage($page);
+ $page=~s/[^\/]+$//;
+ $page=~s/[^\/]+\//..\//g;
+ return $page;
+@@ -422,6 +435,32 @@
+ $config{timeformat}, localtime($time)));
+ } #}}}
+
++sub beautify_url ($) { #{{{
++ my $url=shift;
++
++ $url =~ s!/index.html$!/!;
++ $url =~ s!^$!./!; # Browsers don't like empty links...
++
++ return $url;
++} #}}}
++
++sub urlto ($$) { #{{{
++ my $to=shift;
++ my $from=shift;
++
++ if (! length $to) {
++ return beautify_url(baseurl($from));
++ }
++
++ if (! grep { $_ eq $to } map { @{$_} } values %renderedfiles) {
++ $to=htmlpage($to);
++ }
++
++ my $link = abs2rel($to, dirname(htmlpage($from)));
++
++ return beautify_url($link);
++} #}}}
++
+ sub htmllink ($$$;@) { #{{{
+ my $lpage=shift; # the page doing the linking
+ my $page=shift; # the page that will contain the link (different for inline)
+@@ -457,7 +496,8 @@
+ "\">?</a>$linktext</span>"
+ }
+
+- $bestlink=abs2rel($bestlink, dirname($page));
++ $bestlink=abs2rel($bestlink, dirname(htmlpage($page)));
++ $bestlink=beautify_url($bestlink);
+
+ if (! $opts{noimageinline} && isinlinableimage($bestlink)) {
+ return "<img src=\"$bestlink\" alt=\"$linktext\" />";
Index: IkiWiki/Render.pm
===================================================================
---- IkiWiki/Render.pm (revision 2700)
+--- IkiWiki/Render.pm (revision 2806)
+++ IkiWiki/Render.pm (working copy)
@@ -32,8 +32,8 @@
my @links;
Index: IkiWiki/Plugin/inline.pm
===================================================================
---- IkiWiki/Plugin/inline.pm (revision 2700)
+--- IkiWiki/Plugin/inline.pm (revision 2806)
+++ IkiWiki/Plugin/inline.pm (working copy)
@@ -110,8 +110,8 @@
my $ret="";
if (exists $params{rootpage} && $config{cgiurl}) {
-@@ -151,8 +151,7 @@
+@@ -150,10 +150,7 @@
+ # Don't use htmllink because this way the
# title is separate and can be overridden by
# other plugins.
- my $link=bestlink($params{page}, $page);
+- my $link=bestlink($params{page}, $page);
- $link=htmlpage($link) if defined $type;
- $link=abs2rel($link, dirname($params{destpage}));
-+ $link=urlto($link, $params{destpage});
- $template->param(pageurl => $link);
+- $template->param(pageurl => $link);
++ $template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage}));
$template->param(title => pagetitle(basename($page)));
$template->param(ctime => displaytime($pagectime{$page}));
-@@ -205,15 +204,17 @@
+
+@@ -205,15 +202,17 @@
}
if ($rss) {
genfeed("atom", $atomurl, $desc, $params{page}, @list));
$toping{$params{page}}=1 unless $config{rebuild};
$feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/atom+xml" title="Atom" href="$atomurl" />};
-@@ -288,16 +289,25 @@
+@@ -288,16 +287,21 @@
return $content;
} #}}}
+-sub rsspage ($) { #{{{
+sub basepage ($) { #{{{
-+ my $page=shift;
+ my $page=shift;
+
+ $page=htmlpage($page);
+ $page =~ s/\.html$//;
+
+ return $page;
+} #}}}
-+
- sub rsspage ($) { #{{{
- my $page=shift;
- return $page.".rss";
-+ return basepage($page).".rss";
++sub rsspage ($) { #{{{
++ return targetpage(shift, "rss");
} #}}}
sub atompage ($) { #{{{
- my $page=shift;
-
+- my $page=shift;
+-
- return $page.".atom";
-+ return basepage($page).".atom";
++ return targetpage(shift, "atom");
} #}}}
sub genfeed ($$$$@) { #{{{
-Index: ikiwiki.in
+Index: IkiWiki/Plugin/aggregate.pm
===================================================================
---- ikiwiki.in (revision 2700)
-+++ ikiwiki.in (working copy)
-@@ -46,6 +46,7 @@
- "sslcookie!" => \$config{sslcookie},
- "httpauth!" => \$config{httpauth},
- "userdir=s" => \$config{userdir},
-+ "usedirs!" => \$config{usedirs},
- "exclude=s@" => sub {
- push @{$config{wiki_file_prune_regexps}}, $_[1];
- },
+--- IkiWiki/Plugin/aggregate.pm (revision 2806)
++++ IkiWiki/Plugin/aggregate.pm (working copy)
+@@ -320,7 +320,7 @@
+ # NB: This doesn't check for path length limits.
+ eval q{use POSIX};
+ my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+- if (defined $max && length(htmlpage($page)) >= $max) {
++ if (defined $max && length(htmlfn($page)) >= $max) {
+ $c="";
+ $page=$feed->{dir}."/item";
+ while (exists $IkiWiki::pagecase{lc $page.$c} ||
+@@ -356,7 +356,7 @@
+ if (ref $feed->{tags}) {
+ $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
+ }
+- writefile(htmlpage($guid->{page}), $config{srcdir},
++ writefile(htmlfn($guid->{page}), $config{srcdir},
+ $template->output);
+
+ # Set the mtime, this lets the build process get the right creation
+@@ -434,4 +434,8 @@
+ return "$config{srcdir}/".htmlpage($page);
+ } #}}}
+
++sub htmlfn ($) { #{{{
++ return shift().".html";
++} #}}}
++
+ 1
+Index: IkiWiki/Plugin/linkmap.pm
+===================================================================
+--- IkiWiki/Plugin/linkmap.pm (revision 2806)
++++ IkiWiki/Plugin/linkmap.pm (working copy)
+@@ -49,9 +49,7 @@
+ my %mapitems = ();
+ foreach my $item (keys %links) {
+ if (pagespec_match($item, $params{pages}, $params{page})) {
+- my $link=htmlpage($item);
+- $link=IkiWiki::abs2rel($link, IkiWiki::dirname($params{page}));
+- $mapitems{$item}=$link;
++ $mapitems{$item}=urlto($item, $params{destpage});
+ }
+ }
+
Index: doc/usage.mdwn
===================================================================
---- doc/usage.mdwn (revision 2700)
+--- doc/usage.mdwn (revision 2806)
+++ doc/usage.mdwn (working copy)
@@ -244,6 +244,10 @@
Enable [[w3mmode]], which allows w3m to use ikiwiki as a local CGI script,
Index: doc/plugins/write.mdwn
===================================================================
---- doc/plugins/write.mdwn (revision 2700)
+--- doc/plugins/write.mdwn (revision 2806)
+++ doc/plugins/write.mdwn (working copy)
@@ -412,6 +412,10 @@
ikiwiki's support for revision control systems also uses pluggable perl
Index: doc/ikiwiki.setup
===================================================================
---- doc/ikiwiki.setup (revision 2700)
+--- doc/ikiwiki.setup (revision 2806)
+++ doc/ikiwiki.setup (working copy)
@@ -94,6 +94,8 @@
syslog => 0,
# To add plugins, list them here.
#add_plugins => [qw{goodstuff openid search wikitext camelcase
-Index: IkiWiki.pm
-===================================================================
---- IkiWiki.pm (revision 2700)
-+++ IkiWiki.pm (working copy)
-@@ -14,7 +14,7 @@
- use Exporter q{import};
- our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
- bestlink htmllink readfile writefile pagetype srcfile pagename
-- displaytime will_render gettext
-+ displaytime will_render gettext urlto
- %config %links %renderedfiles %pagesources);
- our $VERSION = 1.02; # plugin interface version, next is ikiwiki version
- our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
-@@ -72,6 +72,7 @@
- sslcookie => 0,
- httpauth => 0,
- userdir => "",
-+ usedirs => 0
- } #}}}
-
- sub checkconfig () { #{{{
-@@ -226,7 +227,11 @@
- sub htmlpage ($) { #{{{
- my $page=shift;
-
-- return $page.".html";
-+ if (! $config{usedirs} || $page =~ /^index$/ || $page =~ /\/index$/) {
-+ return $page.".html";
-+ } else {
-+ return $page."/index.html";
-+ }
- } #}}}
-
- sub srcfile ($) { #{{{
-@@ -390,6 +395,7 @@
-
- return "$config{url}/" if ! defined $page;
-
-+ $page=htmlpage($page);
- $page=~s/[^\/]+$//;
- $page=~s/[^\/]+\//..\//g;
- return $page;
-@@ -419,6 +425,29 @@
- $config{timeformat}, localtime($time)));
- } #}}}
-
-+sub beautify_url ($) { #{{{
-+ my $url=shift;
-+
-+ $url =~ s!/index.html$!/!;
-+ $url =~ s!^$!./!; # Browsers don't like empty links...
-+
-+ return $url;
-+} #}}}
-+
-+sub urlto ($$) { #{{{
-+ my $to=shift;
-+ my $from=shift;
-+
-+ if (length $to &&
-+ ! grep { $_ eq $to } map { @{$_} } values %renderedfiles) {
-+ $to=htmlpage($to);
-+ }
-+
-+ my $link = abs2rel($to, dirname(htmlpage($from)));
-+
-+ return beautify_url($link);
-+} #}}}
-+
- sub htmllink ($$$;@) { #{{{
- my $lpage=shift; # the page doing the linking
- my $page=shift; # the page that will contain the link (different for inline)
-@@ -454,7 +483,8 @@
- "\">?</a>$linktext</span>"
- }
-
-- $bestlink=abs2rel($bestlink, dirname($page));
-+ $bestlink=abs2rel($bestlink, dirname(htmlpage($page)));
-+ $bestlink=beautify_url($bestlink);
-
- if (! $opts{noimageinline} && isinlinableimage($bestlink)) {
- return "<img src=\"$bestlink\" alt=\"$linktext\" />";
</pre>