-Instead of having files foo.html "in front of" foo/, I prefer to have foo/index.html. This patch allows that. Specifically, foo/index.type is translated to $links{'foo/'}, and bestlink looks for either "foo" or "foo/" when linking to pages. There are other miscellaneous changes that go with that:
+Instead of having files foo.html "in front of" foo/, I prefer to have
+foo/index.html. This patch allows that. Specifically, foo/index.type
+is translated to $links{'foo/'}, and bestlink looks for either "foo" or
+"foo/" when linking to pages. There are other miscellaneous changes that
+go with that:
-1. change the `cgi_editpage` `@page_locs` code so that creating foo from a/b/c prefers a/b/foo and then a/b/c/foo, but if creating foo from a/b/c/, then prefer a/b/c/foo. I'm not really sure why the original was doing what it did (why trim terminal `/` if no pages end in `/`?), so this part might break something.
-2. backlinks from "foo/bar" to "foo/" trim common prefixes as long as there would be something left when the trimming is done (i.e. don't trim "foo/")
-3. parentlinks for "foo/" are the same as for "foo", except one directory higher
+1. change the `cgi_editpage` `@page_locs` code so that creating foo from
+ a/b/c prefers a/b/foo and then a/b/c/foo, but if creating foo from a/b/c/,
+ then prefer a/b/c/foo. I'm not really sure why the original was doing what
+ it did (why trim terminal `/` if no pages end in `/`?), so this part might
+ break something.
+2. tweak things so that index.rss and index.atom are generated if inlining
+ from 'foo/'
+3. backlinks from "foo/bar" to "foo/" trim common prefixes as long as there
+   would be something left when the trimming is done (i.e. don't trim "foo/")
+4. parentlinks for "foo/" are the same as for "foo", except one directory
+   higher
5. rewrite parentlinks so that bestlink is called at each level
6. basename("foo/") => basename("foo")
-6. links to "foo/" are translated to "foo/index.html" rather than "foo/.html". (Links to "foo/" might be preferred, but that causes an infinite loop in writefile, because apparently dirname("foo/") == "foo/" on my system for reasons that aren't clear to me.)
+7. links to "foo/" are translated to "foo/index.html" rather than "foo/.html".
+ (Links to "foo/" might be preferred, but that causes an infinite loop in
+ writefile, because apparently dirname("foo/") == "foo/" on my system for
+ reasons that aren't clear to me.)
8. pagetitle("foo/") => pagetitle("foo")
+9. clip the final slash when matching a relative pagespec, even if there are
+ no characters after it (otherwise inlining "./a" from "foo/" gets
+ translated to "foo//a")
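+
+For illustration, the bestlink change described at the top boils down to one
+extra case in the existing lookup loop -- roughly the following (a sketch of
+the idea, not the literal hunk; the full patch is linked below):
+
+	if (exists $links{$l}) {
+		return $l;
+	}
+	elsif (exists $links{$l."/"}) {
+		# new: a page built from foo/index.type is registered as "foo/"
+		return $l."/";
+	}
+	elsif (exists $pagecase{lc $l}) {
+		return $pagecase{lc $l};
+	}
+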
-In case whitespace gets garbled, I'm also leaving a copy of the patch on [my site](http://ikidev.betacantrips.com/patches/index.patch). It should apply cleanly to a freshly unpacked ikiwiki-1.40. You can also see it in action [here](http://ikidev.betacantrips.com/one/). --Ethan
+In case whitespace gets garbled, I'm also leaving a copy of the patch on
+[my site](http://ikidev.betacantrips.com/patches/index.patch). It should apply
+cleanly to a freshly unpacked ikiwiki-1.42. You can also see it in action
+[here](http://ikidev.betacantrips.com/one/). --Ethan
- diff -urx .svn -x doc -x '*.po' -x '*.pot' ikiclean/IkiWiki/CGI.pm ikidev/IkiWiki/CGI.pm
- --- ikiclean/IkiWiki/CGI.pm 2007-01-17 22:11:41.794805000 -0800
- +++ ikidev/IkiWiki/CGI.pm 2007-01-17 21:43:33.750363000 -0800
- @@ -400,8 +400,8 @@
+ diff -urX ignorepats ikiclean/IkiWiki/CGI.pm ikidev/IkiWiki/CGI.pm
+ --- ikiclean/IkiWiki/CGI.pm 2007-02-11 21:40:32.419641000 -0800
+ +++ ikidev/IkiWiki/CGI.pm 2007-02-11 21:54:36.252357000 -0800
+ @@ -408,8 +408,8 @@
@page_locs=$best_loc=$page;
}
else {
if ((defined $form->field('subpage') && length $form->field('subpage')) ||
$page eq gettext('discussion')) {
- @@ -412,7 +412,9 @@
+ @@ -420,7 +420,9 @@
}
push @page_locs, $dir.$page;
while (length $dir) {
$dir=~s![^/]+/+$!!;
push @page_locs, $dir.$page;
- diff -urx .svn -x doc -x '*.po' -x '*.pot' ikiclean/IkiWiki/Render.pm ikidev/IkiWiki/Render.pm
- --- ikiclean/IkiWiki/Render.pm 2007-01-11 15:01:51.000000000 -0800
- +++ ikidev/IkiWiki/Render.pm 2007-01-17 22:25:13.526856000 -0800
+ diff -urX ignorepats ikiclean/IkiWiki/Plugin/inline.pm ikidev/IkiWiki/Plugin/inline.pm
+ --- ikiclean/IkiWiki/Plugin/inline.pm 2007-02-11 21:40:31.996007000 -0800
+ +++ ikidev/IkiWiki/Plugin/inline.pm 2007-02-11 21:54:36.008358000 -0800
+ @@ -110,8 +110,8 @@
+
+ add_depends($params{page}, $params{pages});
+
+ - my $rssurl=rsspage(basename($params{page}));
+ - my $atomurl=atompage(basename($params{page}));
+ + my $rssurl=basename(rsspage($params{page}));
+ + my $atomurl=basename(atompage($params{page}));
+ my $ret="";
+
+ if (exists $params{rootpage} && $config{cgiurl}) {
+ @@ -285,14 +285,18 @@
+
+ sub rsspage ($) { #{{{
+ my $page=shift;
+ + $page = htmlpage($page);
+ + $page =~s/\.html$/.rss/;
+
+ - return $page.".rss";
+ + return $page;
+ } #}}}
+
+ sub atompage ($) { #{{{
+ my $page=shift;
+ + $page = htmlpage($page);
+ + $page =~s/\.html$/.atom/;
+
+ - return $page.".atom";
+ + return $page;
+ } #}}}
+
+ sub genfeed ($$$$@) { #{{{
+ diff -urX ignorepats ikiclean/IkiWiki/Render.pm ikidev/IkiWiki/Render.pm
+ --- ikiclean/IkiWiki/Render.pm 2007-02-11 21:40:32.413641000 -0800
+ +++ ikidev/IkiWiki/Render.pm 2007-02-11 21:54:36.246356000 -0800
@@ -40,6 +40,7 @@
my $dir;
1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
}
else {
$skip=0;
- diff -urx .svn -x doc -x '*.po' -x '*.pot' ikiclean/IkiWiki.pm ikidev/IkiWiki.pm
- --- ikiclean/IkiWiki.pm 2007-01-12 12:47:09.000000000 -0800
- +++ ikidev/IkiWiki.pm 2007-01-15 16:56:58.973680000 -0800
- @@ -185,6 +185,7 @@
+ diff -urX ignorepats ikiclean/IkiWiki.pm ikidev/IkiWiki.pm
+ --- ikiclean/IkiWiki.pm 2007-02-11 21:40:35.118406000 -0800
+ +++ ikidev/IkiWiki.pm 2007-02-11 22:22:49.146071000 -0800
+ @@ -188,6 +188,7 @@
sub basename ($) { #{{{
my $file=shift;
$file=~s!.*/+!!;
return $file;
} #}}}
- @@ -211,12 +212,14 @@
+ @@ -214,12 +215,14 @@
my $type=pagetype($file);
my $page=$file;
$page=~s/\Q.$type\E*$// if defined $type;
return $page.".html";
} #}}}
- @@ -300,6 +303,7 @@
+ @@ -307,6 +310,7 @@
my $page=shift;
my $link=shift;
my $cwd=$page;
if ($link=~s/^\/+//) {
# absolute links
- @@ -314,6 +318,9 @@
+ @@ -321,6 +325,9 @@
if (exists $links{$l}) {
return $l;
}
elsif (exists $pagecase{lc $l}) {
return $pagecase{lc $l};
}
- @@ -344,6 +351,7 @@
+ @@ -351,6 +358,7 @@
$page=~s/__(\d+)__/&#$1;/g;
}
$page=~y/_/ /;
return $page;
} #}}}
+ @@ -879,7 +887,7 @@
+
+ # relative matching
+ if ($glob =~ m!^\./!) {
+ - $from=~s!/?[^/]+$!!;
+ + $from=~s!/?[^/]*$!!;
+ $glob=~s!^\./!!;
+ $glob="$from/$glob" if length $from;
+ }
I independently implemented a similar, but smaller patch.
(It's smaller because I only care about rendering; not CGI, for example.)
Also note that an initial "index" is ignored. I.e. a
page "A/B/index.html" is treated as "A/B".
+> This is actually a pretty cool hack. I'll have to think about
+> whether I like it better than my way though :) --Ethan
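+
+For concreteness, the "index is ignored" behaviour described above could be
+implemented roughly like this in pagename -- an editorial sketch based on the
+description, not the actual (smaller) patch:
+
+	sub pagename ($) {
+		my $file=shift;
+		my $type=pagetype($file);
+		my $page=$file;
+		$page=~s/\Q.$type\E*$// if defined $type;	# "A/B/index.mdwn" -> "A/B/index"
+		$page=~s!/index$!!;	# "A/B/index" -> "A/B"; the toplevel "index" is left alone
+		return $page;
+	}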
+
+---
+
+How about doing the index stuff only on the output side? (Or does the latter patch do it? I haven't tried them.) That is, render every `foo.type` for the rendered types (mdwn etc.) as `foo/index.html`, generating links to `foo/` instead of `foo.html`, but not earlier than the point where the .html is presently appended to the page name. Then you just flip a build-time option on an existing wiki, without any changes to the wiki itself, and the pages appear elsewhere. The `index.type` files might be left out of this scheme, though (and the top-level one, of course, has to). --[[tuomov]]
+
+> Well, I got around to wasting time on it after all, and [here's the patch](http://iki.fi/tuomov/use_dirs.diff). The `-use_dirs` option will cause everything to be rendered inside directories. There may still be some problems with it that need looking into (it doesn't e.g. check for conflicts between foo/index.mdwn and foo.mdwn), but it seems to work well enough for me... The patch also improves, I think, the parentlinks code a little, as it uses generic routines to actually find the target location now. The only place where the `use_dirs` option is used is `htmlpage`, in fact, although other specific kludges needed to be removed from other points in the code.
+
+>> FWIW, [use_dirs.diff](http://iki.fi/tuomov/use_dirs.diff) applies cleanly, and works well for me. Given that it makes this behaviour optional, how about merging it? I have some follow-up patches which I'm sitting on for now. ;-) -- Ben
+
+>>> How do you apply a patch created by svn diff? I've been curious about this for a long time. The use_dirs patch looks OK but I'd like to play with it. --Ethan
+
+>>>> Just do `svn co svn://ikiwiki.kitenet.net/ikiwiki/trunk ikiwiki` then `cd ikiwiki && patch -p0 <use_dirs.diff`. :-) Same would work with a tarball as well.
+
+>>>>> Sorry, I'm dumb. I'm so used to doing -p1 that doing -p0 never occurred to me; I thought the patch format generated by svn diff was just "wrong". --Ethan
+
+----
+
+First pass over tuomov's patch -- which doesn't cleanly apply anymore, so
+I'll attach an updated and slightly modified version below.
+
+* `urlto()` is O(N) in the number of pages in the wiki, which leads to
+ O(N^2) behavior, which could be a scalability problem. This happens because
+ of the lookup for `$to` in `%renderedfiles`, which shouldn't be necessary
+  most of the time. Couldn't it just be required that `$to` be an html page
+  name on input? Or require it to be a non-html page name and always run
+ htmlpage on it.
+
+ > Perhaps it would be possible to require that, but it seems like a
+ > very artificial restriction. The renderedfiles search is just a
+ > copy-paste from htmllink, and I'm no perl (or ikiwiki internals)
+ > expert... maybe there would be a faster way to do the check whether
+ > name translation is needed? No more than O(log n) steps should be
+ > needed for a simple search, after all, and maybe there would be shortcuts
+ > for even constant-time (in n) checks. --[[tuomov]]
+
+  >> Ah, so much easier to critique other people's code than your own.
+ >> You're right, this is a general problem, and I can get it to log n
+ >> if I really want to. --[[Joey]]
+
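+For what it's worth, the linear scan could be avoided entirely with a reverse
+map from rendered file names to pages. An editorial sketch, not part of either
+patch; the map would need refreshing whenever `%renderedfiles` changes:
+
+<pre>
+# build a reverse map once, so urlto() can test "is $to already a rendered
+# file?" with a hash lookup instead of grepping %renderedfiles
+my %destfile;
+sub update_destfiles () {
+	%destfile=();
+	foreach my $page (keys %renderedfiles) {
+		$destfile{$_}=$page foreach @{$renderedfiles{$page}};
+	}
+}
+
+# in urlto():
+#	$to=htmlpage($to) if length $to && ! exists $destfile{$to};
+</pre>
+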
+* As we discussed in email, this will break handling of `foo/index.mdwn`
+ pages. Needs to be changed to generate `foo/index/index.html` for such
+ pages (though not for the toplevel `index`).
+
+ >> Can someone elaborate on this? What's broken about it? Will pages
+ >> foo/index/index.html include foo/index in their parentlinks? --Ethan
+
+  >>> Presently the patch does not render `foo/index.type` as `foo/index/index.html`, but renders
+ >>> it as `foo/index.html`, not because I particularly want that (except for the top-level one, of
+ >>> course), but because it could be done :). This, however, conflicts with a `foo.mdwn`
+  >>> rendered as `foo/index.html`. The easiest and cleanest way to fix this is to simply
+ >>> not handle `index` in such a special manner -- except for the top-level one. --[[tuomov]]
+
+ >>>> Oh, I see, this patch doesn't address wanting to use foo/index.mdwn as
+ >>>> an input page. Hmm. --Ethan
+
+  >>>>> No, it doesn't. I was originally after that too, but after discussing the
+  >>>>> complexities of supporting it with Joey, I came up with this simpler scheme
+ >>>>> without many of those issues. It is the output that I primarily care about, anyway,
+ >>>>> and I do, in fact, find the present input file organisation quite nice. The output
+ >>>>> locations just aren't very good for conversion of an existing site to ikiwiki, and do
+ >>>>> make for rather ugly URLs with the .html extensions. (I do often type some URLs
+ >>>>> out of memory, when they're gone from the browser's completion history, and the
+  >>>>> .html makes that more laborious.)
+
+ >>>>>> I support your decision, but now this wiki page serves two different patches :).
+ >>>>>> Can we split them somehow?
+ >>>>>> What are the complexities involved?
+ >>>>>> I think I overcomplicated it a little with my patch, and Per Bothner's gets
+ >>>>>> much closer to the heart of it. --Ethan
+
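+Concretely, the fix being discussed would reduce `htmlpage`'s special-casing
+to the toplevel index only -- a sketch to compare against the `htmlpage` hunk
+in the attached patch below:
+
+<pre>
+# sketch: foo/index.mdwn now renders as foo/index/index.html, so it no longer
+# collides with foo.mdwn rendering as foo/index.html
+sub htmlpage ($) { #{{{
+	my $page=shift;
+
+	if (! $config{usedirs} || $page eq 'index') {
+		return $page.".html";
+	}
+	else {
+		return $page."/index.html";
+	}
+} #}}}
+</pre>
+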
+* This does make the resulting wikis much less browsable directly on the
+ filesystem, since `dir` to `dir/index.html` conversion is only handled by web
+ servers and so you end up browsing to a directory index all the time.
+ Wouldn't it be better to make the links themselves include the index.html?
+ (Although that would mean that [[bugs/broken_parentlinks]] would not be
+ fixed en passant by this patch..)
+
+ > Yes, the sites are not that browsable on the FS (I blame the browsers
+ > for being stupid!), but linking to the directory produces so much
+  > cleaner URLs for the Web that I specifically want it. This is,
+ > after all, an optional arrangement.
+
+ >> It's optional for *now* ... I suppose that I could make adding the
+  >> index.html yet another option. I'm not _that_ fond of options,
+ >> however. --[[Joey]]
+
+  >>> It is worth noting that, with this patch, you _can_ render the local
+ >>> copy in the present manner, while rendering the Web copy under
+ >>> directories. So no extra options are really needed for local browsing,
+ >>> unless you also want to serve the same copy over the Web, which I
+ >>> doubt. --[[tuomov]]
+
+* Some of the generated links are missing the trailing /, which is
+  inefficient since it leads to an http redirect when clicking on that
+  link. Seems to be limited to ".." links, and possibly only to
+  parentlinks. (Already fixed it for "." links.)
+
+ > The solution seems to be to add to `urlto` the following snippet,
+ > which might also help with the next point. (Sorry, no updated patch
+ > yet. Should be on my way out in the cold anyway...)
+
+ if ( !length $to ) {
+ return baseurl($from);
+ }
+
+
+* It calls abs2rel about 16% more often with the patch, which makes it
+  a bit slower, since abs2rel is not very efficient. (This omits abs2rel
+  calls that might be memoized away already.) This seems to be due to one
+  extra abs2rel call for the toplevel wiki page from the nicely cleaned-up
+  code in `parentlinks` -- so I'm not really complaining, especially since
+  the patch adds a nice new memoizable `urlto`.
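+
+For reference, the memoization in question is just the stock Memoize module --
+a sketch, assuming it is only enabled once `urlto`'s inputs (notably
+`%renderedfiles`) are stable:
+
+<pre>
+use Memoize;
+memoize("abs2rel");	# possibly already done elsewhere
+memoize("urlto");
+</pre>
+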
+* The rss page name generation code seems unnecessarily roundabout; I'm sure
+ that can be cleaned up somehow, perhaps by making `htmlpage` more
+ generic.
+
+ > Something like `targetpage(basename, extension)`?
+
+ >> Yes exactly. It might also be possible to remove htmlpage from the
+ >> plugin interface entirely (in favour of urlto), which would be a
+  >> good time to make such a change. Not required to accept this patch
+ >> though.
+
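+Such a helper might look like the following sketch; the name and signature are
+just the ones floated above, so treat both as assumptions:
+
+<pre>
+sub targetpage ($$) { #{{{
+	my $page=shift;
+	my $ext=shift;
+
+	if (! $config{usedirs} || $page eq 'index') {
+		return $page.".".$ext;
+	}
+	else {
+		return $page."/index.".$ext;
+	}
+} #}}}
+
+# rsspage() and atompage() then become targetpage($page, "rss") and
+# targetpage($page, "atom")
+</pre>
+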
+* `aggregate.pm` uses htmlpage in a way that breaks with its new behavior.
+ It will need to be changed as follows:
+
+<pre>
+Index: aggregate.pm
+===================================================================
+--- aggregate.pm (revision 2700)
++++ aggregate.pm (working copy)
+@@ -320,7 +320,7 @@
+ # NB: This doesn't check for path length limits.
+ eval q{use POSIX};
+ my $max=POSIX::pathconf($config{srcdir}, &POSIX::_PC_NAME_MAX);
+- if (defined $max && length(htmlpage($page)) >= $max) {
++ if (defined $max && length(htmlfn($page)) >= $max) {
+ $c="";
+ $page=$feed->{dir}."/item";
+ while (exists $IkiWiki::pagecase{lc $page.$c} ||
+@@ -356,7 +356,7 @@
+ if (ref $feed->{tags}) {
+ $template->param(tags => [map { tag => $_ }, @{$feed->{tags}}]);
+ }
+- writefile(htmlpage($guid->{page}), $config{srcdir},
++ writefile(htmlfn($guid->{page}), $config{srcdir},
+ $template->output);
+
+ # Set the mtime, this lets the build process get the right creation
+@@ -434,4 +434,8 @@
+ return "$config{srcdir}/".htmlpage($page);
+ } #}}}
+
++sub htmlfn ($) { #{{{
++ return shift().".html";
++} #}}}
++
+ 1
+</pre>
+
+* `linkmap.pm` uses `htmlpage` to construct a link and should probably be
+ changed like this (untested):
+
+<pre>
+Index: linkmap.pm
+===================================================================
+--- linkmap.pm (revision 2700)
++++ linkmap.pm (working copy)
+@@ -50,8 +50,7 @@
+ foreach my $item (keys %links) {
+ if (pagespec_match($item, $params{pages}, $params{page})) {
+ my $link=htmlpage($item);
+- $link=IkiWiki::abs2rel($link, IkiWiki::dirname($params{page}));
+- $mapitems{$item}=$link;
++ $mapitems{$item}=urlto($link, $params{destpage});
+ }
+ }
+</pre>
+
+> This is probably supposed to be `$mapitems{$item}=urlto($item, $params{destpage});`,
+> which does indeed remove one more `htmlpage` call from the plugins. I can't actually
+> try it: "failed writing to dst/ts.png.ikiwiki-new: Inappropriate ioctl for device".
+
+>> Crazy perl bug, that ioctl thing. Worked around now in svn. --[[Joey]]
+
+> After this probable fix, in fact, all remaining uses of htmlpage in the plugins
+> construct an absolute address: the absolute url in most cases, so an `absurl`
+> call could be added to be used instead of htmlpage, and something else in the
+> aggregate plugin (above), which I also think isn't what's wanted:
+> aren't `foo.html` pages also "rendered", so that they get moved to `foo/index.html`?
+> --[[tuomov]]
+
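+For reference, the `absurl` helper suggested above might be as simple as the
+following sketch, built on the patch's `beautify_url` and `htmlpage` (the name
+is just the one proposed in the comment):
+
+<pre>
+sub absurl ($) { #{{{
+	my $page=shift;
+
+	return $config{url}."/".beautify_url(htmlpage($page));
+} #}}}
+</pre>
+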
+* `inline.pm` uses htmlpage and `abs2rel` to generate a link, and probably
+ needs to be changed to either use `urlto` or to call `beautify_url` like
+ htmllink does. This might work:
+
+<pre>
+Index: inline.pm
+===================================================================
+--- inline.pm (revision 2700)
++++ inline.pm (working copy)
+@@ -150,10 +150,7 @@
+ # Don't use htmllink because this way the
+ # title is separate and can be overridden by
+ # other plugins.
+- my $link=bestlink($params{page}, $page);
+- $link=htmlpage($link) if defined $type;
+- $link=abs2rel($link, dirname($params{destpage}));
+- $template->param(pageurl => $link);
++ $template->param(pageurl => urlto(bestlink($params{page}, $page), $params{destpage}));
+ $template->param(title => pagetitle(basename($page)));
+ $template->param(ctime => displaytime($pagectime{$page}));
+</pre>
+
+* `img.pm` makes some assumptions about the name of the page that will be
+ linking to the image, which are probably broken.
+
+* The changes to htmlpage's behavior probably call for the plugin
+ interface version number to be changed.
+
+--[[Joey]]
+
+Updated version of tuomov's patch follows:
+
+<pre>
+Index: IkiWiki/Render.pm
+===================================================================
+--- IkiWiki/Render.pm (revision 2700)
++++ IkiWiki/Render.pm (working copy)
+@@ -32,8 +32,8 @@
+ my @links;
+ return unless $backlinks{$page};
+ foreach my $p (keys %{$backlinks{$page}}) {
+- my $href=abs2rel(htmlpage($p), dirname($page));
+-
++ my $href=urlto($p, $page);
++
+ # Trim common dir prefixes from both pages.
+ my $p_trimmed=$p;
+ my $page_trimmed=$page;
+@@ -55,18 +55,14 @@
+ my @ret;
+ my $pagelink="";
+ my $path="";
+- my $skip=1;
++ my $title=$config{wikiname};
++
+ return if $page eq 'index'; # toplevel
+- foreach my $dir (reverse split("/", $page)) {
+- if (! $skip) {
+- $path.="../";
+- unshift @ret, { url => $path.htmlpage($dir), page => pagetitle($dir) };
+- }
+- else {
+- $skip=0;
+- }
++ foreach my $dir (split("/", $page)) {
++ push @ret, { url => urlto($path, $page), page => $title };
++ $path.="/".$dir;
++ $title=pagetitle($dir);
+ }
+- unshift @ret, { url => length $path ? $path : ".", page => $config{wikiname} };
+ return @ret;
+ } #}}}
+
+Index: IkiWiki/Plugin/inline.pm
+===================================================================
+--- IkiWiki/Plugin/inline.pm (revision 2700)
++++ IkiWiki/Plugin/inline.pm (working copy)
+@@ -110,8 +110,8 @@
+
+ add_depends($params{page}, $params{pages});
+
+- my $rssurl=rsspage(basename($params{page}));
+- my $atomurl=atompage(basename($params{page}));
++ my $rssurl=basename(rsspage($params{page}));
++ my $atomurl=basename(atompage($params{page}));
+ my $ret="";
+
+ if (exists $params{rootpage} && $config{cgiurl}) {
+@@ -151,8 +151,7 @@
+ # title is separate and can be overridden by
+ # other plugins.
+ my $link=bestlink($params{page}, $page);
+- $link=htmlpage($link) if defined $type;
+- $link=abs2rel($link, dirname($params{destpage}));
++ $link=urlto($link, $params{destpage});
+ $template->param(pageurl => $link);
+ $template->param(title => pagetitle(basename($page)));
+ $template->param(ctime => displaytime($pagectime{$page}));
+@@ -205,15 +204,17 @@
+ }
+
+ if ($rss) {
+- will_render($params{page}, rsspage($params{page}));
+- writefile(rsspage($params{page}), $config{destdir},
++ my $rssp=rsspage($params{page});
++ will_render($params{page}, $rssp);
++ writefile($rssp, $config{destdir},
+ genfeed("rss", $rssurl, $desc, $params{page}, @list));
+ $toping{$params{page}}=1 unless $config{rebuild};
+ $feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/rss+xml" title="RSS" href="$rssurl" />};
+ }
+ if ($atom) {
+- will_render($params{page}, atompage($params{page}));
+- writefile(atompage($params{page}), $config{destdir},
++ my $atomp=atompage($params{page});
++ will_render($params{page}, $atomp);
++ writefile($atomp, $config{destdir},
+ genfeed("atom", $atomurl, $desc, $params{page}, @list));
+ $toping{$params{page}}=1 unless $config{rebuild};
+ $feedlinks{$params{destpage}}=qq{<link rel="alternate" type="application/atom+xml" title="Atom" href="$atomurl" />};
+@@ -288,16 +289,25 @@
+ return $content;
+ } #}}}
+
++sub basepage ($) { #{{{
++ my $page=shift;
++
++ $page=htmlpage($page);
++ $page =~ s/\.html$//;
++
++ return $page;
++} #}}}
++
+ sub rsspage ($) { #{{{
+ my $page=shift;
+
+- return $page.".rss";
++ return basepage($page).".rss";
+ } #}}}
+
+ sub atompage ($) { #{{{
+ my $page=shift;
+
+- return $page.".atom";
++ return basepage($page).".atom";
+ } #}}}
+
+ sub genfeed ($$$$@) { #{{{
+Index: ikiwiki.in
+===================================================================
+--- ikiwiki.in (revision 2700)
++++ ikiwiki.in (working copy)
+@@ -46,6 +46,7 @@
+ "sslcookie!" => \$config{sslcookie},
+ "httpauth!" => \$config{httpauth},
+ "userdir=s" => \$config{userdir},
++ "usedirs!" => \$config{usedirs},
+ "exclude=s@" => sub {
+ push @{$config{wiki_file_prune_regexps}}, $_[1];
+ },
+Index: doc/usage.mdwn
+===================================================================
+--- doc/usage.mdwn (revision 2700)
++++ doc/usage.mdwn (working copy)
+@@ -244,6 +244,10 @@
+
+ Log to syslog(3).
+
++* --usedirs
++
++ Create output files named page/index.html instead of page.html.
++
+ * --w3mmode, --no-w3mmode
+
+ Enable [[w3mmode]], which allows w3m to use ikiwiki as a local CGI script,
+Index: doc/plugins/write.mdwn
+===================================================================
+--- doc/plugins/write.mdwn (revision 2700)
++++ doc/plugins/write.mdwn (working copy)
+@@ -412,6 +412,10 @@
+
+ This is the standard gettext function, although slightly optimised.
+
++#### `urlto($$)`
++
++Construct a relative url to the first parameter from the second.
++
+ ## RCS plugins
+
+ ikiwiki's support for revision control systems also uses pluggable perl
+Index: doc/ikiwiki.setup
+===================================================================
+--- doc/ikiwiki.setup (revision 2700)
++++ doc/ikiwiki.setup (working copy)
+@@ -94,6 +94,8 @@
+ syslog => 0,
+ # To link to user pages in a subdirectory of the wiki.
+ #userdir => "users",
++ # To enable alternate output filenames.
++ #usedirs => 1,
+
+ # To add plugins, list them here.
+ #add_plugins => [qw{goodstuff openid search wikitext camelcase
+Index: IkiWiki.pm
+===================================================================
+--- IkiWiki.pm (revision 2700)
++++ IkiWiki.pm (working copy)
+@@ -14,7 +14,7 @@
+ use Exporter q{import};
+ our @EXPORT = qw(hook debug error template htmlpage add_depends pagespec_match
+ bestlink htmllink readfile writefile pagetype srcfile pagename
+- displaytime will_render gettext
++ displaytime will_render gettext urlto
+ %config %links %renderedfiles %pagesources);
+ our $VERSION = 1.02; # plugin interface version, next is ikiwiki version
+ our $version='unknown'; # VERSION_AUTOREPLACE done by Makefile, DNE
+@@ -72,6 +72,7 @@
+ sslcookie => 0,
+ httpauth => 0,
+ userdir => "",
++ usedirs => 0
+ } #}}}
+
+ sub checkconfig () { #{{{
+@@ -226,7 +227,11 @@
+ sub htmlpage ($) { #{{{
+ my $page=shift;
+
+- return $page.".html";
++ if (! $config{usedirs} || $page =~ /^index$/ || $page =~ /\/index$/) {
++ return $page.".html";
++ } else {
++ return $page."/index.html";
++ }
+ } #}}}
+
+ sub srcfile ($) { #{{{
+@@ -390,6 +395,7 @@
+
+ return "$config{url}/" if ! defined $page;
+
++ $page=htmlpage($page);
+ $page=~s/[^\/]+$//;
+ $page=~s/[^\/]+\//..\//g;
+ return $page;
+@@ -419,6 +425,29 @@
+ $config{timeformat}, localtime($time)));
+ } #}}}
+
++sub beautify_url ($) { #{{{
++ my $url=shift;
++
++ $url =~ s!/index.html$!/!;
++ $url =~ s!^$!./!; # Browsers don't like empty links...
++
++ return $url;
++} #}}}
++
++sub urlto ($$) { #{{{
++ my $to=shift;
++ my $from=shift;
++
++ if (length $to &&
++ ! grep { $_ eq $to } map { @{$_} } values %renderedfiles) {
++ $to=htmlpage($to);
++ }
++
++ my $link = abs2rel($to, dirname(htmlpage($from)));
++
++ return beautify_url($link);
++} #}}}
++
+ sub htmllink ($$$;@) { #{{{
+ my $lpage=shift; # the page doing the linking
+ my $page=shift; # the page that will contain the link (different for inline)
+@@ -454,7 +483,8 @@
+ "\">?</a>$linktext</span>"
+ }
+
+- $bestlink=abs2rel($bestlink, dirname($page));
++ $bestlink=abs2rel($bestlink, dirname(htmlpage($page)));
++ $bestlink=beautify_url($bestlink);
+
+ if (! $opts{noimageinline} && isinlinableimage($bestlink)) {
+ return "<img src=\"$bestlink\" alt=\"$linktext\" />";
+</pre>