safe => 1,
rebuild => 1,
},
+ reverse_proxy => {
+ type => "boolean",
+ default => 0,
+ description => "do not adjust cgiurl if CGI is accessed via different URL",
+ advanced => 0,
+ safe => 1,
+ rebuild => 0, # only affects CGI requests
+ },
cgi_wrapper => {
type => "string",
default => '',
},
useragent => {
type => "string",
- default => undef,
+ default => "ikiwiki/$version",
example => "Wget/1.13.4 (linux-gnu)",
description => "set custom user agent string for outbound HTTP requests e.g. when fetching aggregated RSS feeds",
safe => 0,
$local_cgiurl = $cgiurl->path;
- if ($cgiurl->scheme ne $baseurl->scheme or
- $cgiurl->authority ne $baseurl->authority) {
+ if ($cgiurl->scheme eq 'https' &&
+ $baseurl->scheme eq 'http') {
+ # We assume that the same content is available
+ # over both http and https, because if it
+ # wasn't, accessing the static content
+ # from the CGI would be mixed-content,
+ # which would be a security flaw.
+
+ if ($cgiurl->authority ne $baseurl->authority) {
+ # use protocol-relative URL for
+ # static content
+ $local_url = "$config{url}/";
+ $local_url =~ s{^http://}{//};
+ }
+ # else use host-relative URL for static content
+
+ # either way, CGI needs to be absolute
+ $local_cgiurl = $config{cgiurl};
+ }
+ elsif ($cgiurl->scheme ne $baseurl->scheme) {
# too far apart, fall back to absolute URLs
$local_url = "$config{url}/";
$local_cgiurl = $config{cgiurl};
}
+ elsif ($cgiurl->authority ne $baseurl->authority) {
+ # slightly too far apart, fall back to
+ # protocol-relative URLs
+ $local_url = "$config{url}/";
+ $local_url =~ s{^https?://}{//};
+ $local_cgiurl = $config{cgiurl};
+ $local_cgiurl =~ s{^https?://}{//};
+ }
+ # else keep host-relative URLs
}
$local_url =~ s{//$}{/};
my $template=template("page.tmpl");
- my $topurl = defined $cgi ? $cgi->url : $config{url};
+ my $topurl = $config{url};
+ if (defined $cgi && ! $config{w3mmode} && ! $config{reverse_proxy}) {
+ $topurl = $cgi->url;
+ }
my $page="";
if (exists $params{page}) {
$page=delete $params{page};
- $params{forcebaseurl}=urlabs(urlto($page), $topurl);
+ $params{forcebaseurl}=urlto($page);
+ if (! $config{html5}) {
+ $params{forcebaseurl}=urlabs($params{forcebaseurl}, $topurl);
+ }
}
run_hooks(pagetemplate => sub {
shift->(
});
templateactions($template, "");
+ my $baseurl = baseurl();
+ if (! $config{html5}) {
+ 		$baseurl = urlabs($baseurl, $topurl);
+ }
+
$template->param(
dynamic => 1,
title => $title,
wikiname => $config{wikiname},
content => $content,
- baseurl => urlabs(baseurl(), $topurl),
+ baseurl => $baseurl,
html5 => $config{html5},
%params,
);
sub redirect ($$) {
my $q=shift;
eval q{use URI};
- my $url=URI->new(urlabs(shift, $q->url));
+
+ my $topurl;
+ if (defined $q && ! $config{w3mmode} && ! $config{reverse_proxy}) {
+ $topurl = $q->url;
+ }
+
+ my $url=URI->new(urlabs(shift, $topurl));
if (! $config{w3mmode}) {
print $q->redirect($url);
}
if ($] < 5.01) {
my $cgi = shift;
foreach my $f ($cgi->param) {
- $cgi->param($f, map { decode_utf8 $_ } $cgi->param($f));
+ $cgi->param($f, map { decode_utf8 $_ }
+ @{$cgi->param_fetch($f)});
}
}
}
return if ! defined $form->field("do") || ($form->field("do") ne "edit" && $form->field("do") ne "create") ;
- my $filename=Encode::decode_utf8($q->param('attachment'));
+ my $filename=Encode::decode_utf8(scalar $q->param('attachment'));
if (defined $filename && length $filename) {
attachment_store($filename, $form, $q, $params{session});
}
}
if ($form->submitted eq "Insert Links") {
- my $page=quotemeta(Encode::decode_utf8($q->param("page")));
+ my $page=quotemeta(Encode::decode_utf8(scalar $q->param("page")));
my $add="";
- foreach my $f ($q->param("attachment_select")) {
+ foreach my $f (@{$q->param_fetch("attachment_select")}) {
$f=Encode::decode_utf8($f);
$f=~s/^$page\///;
if (IkiWiki::isinlinableimage($f) &&
$username =~ s/"/"/g;
$content .= " username=\"$username\"\n";
}
+
if (defined $session->param('nickname')) {
my $nickname = $session->param('nickname');
$nickname =~ s/"/"/g;
$content .= " nickname=\"$nickname\"\n";
}
- elsif (defined $session->remote_addr()) {
+
+ if (!(defined $session->param('name') || defined $session->param('nickname')) &&
+ defined $session->remote_addr()) {
$content .= " ip=\"".$session->remote_addr()."\"\n";
}
else {
# save page
check_canedit($page, $q, $session);
- checksessionexpiry($q, $session, $q->param('sid'));
+ checksessionexpiry($q, $session);
my $exists=-e "$config{srcdir}/$file";
my $page = shift;
if (!defined $page) {
- $page = IkiWiki::decode_utf8($q->param("page"));
+ $page = IkiWiki::decode_utf8(scalar $q->param("page"));
if (!defined $page) {
error("missing page parameter");
my $session=shift;
if ($q->param('do') eq 'blog') {
- my $page=titlepage(decode_utf8($q->param('title')));
+ my $page=titlepage(decode_utf8(scalar $q->param('title')));
$page=~s/(\/)/"__".ord($1)."__"/eg; # don't create subdirs
# if the page already exists, munge it to be unique
my $from=$q->param('from');
my $claimed_identity = $csr->claimed_identity($openid_url);
if (! $claimed_identity) {
if ($errhandler) {
- $errhandler->($csr->err);
+ if (ref($errhandler) eq 'CODE') {
+ $errhandler->($csr->err);
+ }
return 0;
}
else {
}
elsif (defined $q->param('openid_identifier')) {
# myopenid.com affiliate support
- validate($q, $session, $q->param('openid_identifier'));
+ validate($q, $session, scalar $q->param('openid_identifier'));
}
}
my $cgi=shift;
my $session=shift;
if (defined $cgi->param('do') && $cgi->param('do') eq "poll") {
- my $choice=decode_utf8($cgi->param('choice'));
+ my $choice=decode_utf8(scalar $cgi->param('choice'));
if (! defined $choice || not length $choice) {
error("no choice specified");
}
IkiWiki::decode_form_utf8($form);
if ($form->submitted eq 'Revert' && $form->validate) {
- IkiWiki::checksessionexpiry($q, $session, $q->param('sid'));
+ IkiWiki::checksessionexpiry($q, $session);
my $message=sprintf(gettext("This reverts commit %s"), $rev);
if (defined $form->field('revertmessage') &&
length $form->field('revertmessage')) {
postremove($session);
}
elsif ($form->submitted eq 'Remove' && $form->validate) {
- IkiWiki::checksessionexpiry($q, $session, $q->param('sid'));
+ IkiWiki::checksessionexpiry($q, $session);
my @pages=$form->field("page");
# on it.
$oldcgi->param("editcontent",
renamepage_hook($dest, $src, $dest,
- $oldcgi->param("editcontent")));
+ scalar $oldcgi->param("editcontent")));
# Get a new edit token; old was likely invalidated.
$oldcgi->param("rcsinfo",
if ($q->param("do") eq 'rename') {
my $session=shift;
- my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8($q->param("page")));
+ my ($form, $buttons)=rename_form($q, $session, Encode::decode_utf8(scalar $q->param("page")));
IkiWiki::decode_form_utf8($form);
my $src=$form->field("page");
postrename($q, $session, $src);
}
elsif ($form->submitted eq 'Rename' && $form->validate) {
- IkiWiki::checksessionexpiry($q, $session, $q->param('sid'));
+ IkiWiki::checksessionexpiry($q, $session);
# These untaints are safe because of the checks
# performed in check_canrename later.
IkiWiki::Plugin::attachment::is_held_attachment($src);
if ($held) {
rename($held, IkiWiki::Plugin::attachment::attachment_holding_location($dest));
- postrename($q, $session, $src, $dest, $q->param("attachment"))
+ postrename($q, $session, $src, $dest, scalar $q->param("attachment"))
unless defined $srcfile;
}
$renamesummary.=$template->output;
}
- postrename($q, $session, $src, $dest, $q->param("attachment"));
+ postrename($q, $session, $src, $dest, scalar $q->param("attachment"));
}
else {
IkiWiki::showform($form, $buttons, $session, $q);
}
$pagecase{lc $page}=$page;
if (! exists $pagectime{$page}) {
- $pagectime{$page}=(srcfile_stat($file))[10];
+ my $ctime=(srcfile_stat($file, 1))[10];
+ $pagectime{$page}=$ctime if defined $ctime;
}
}
}
my @internal_changed;
foreach my $file (@$files) {
my $page=pagename($file);
- my ($srcfile, @stat)=srcfile_stat($file);
- if (! exists $pagemtime{$page} ||
- $stat[9] > $pagemtime{$page} ||
- $forcerebuild{$page}) {
+ my ($srcfile, @stat)=srcfile_stat($file, 1);
+ if (defined $srcfile &&
+ (! exists $pagemtime{$page} ||
+ $stat[9] > $pagemtime{$page} ||
+ $forcerebuild{$page})) {
$pagemtime{$page}=$stat[9];
if (isinternal($page)) {
outprogs=ikiwiki.out ikiwiki-transition.out ikiwiki-calendar.out
scripts=ikiwiki-update-wikilist ikiwiki-makerepo
sysconfdir_scripts=ikiwiki-mass-rebuild ikiwiki-update-wikilist
+shebang_scripts=$(shell $(FIND) . -type f \( -name '*.in' -o -name '*.cgi' -o -name '*.pm' -o -name '*.pm.example' -o -name '*.t' -o -name '*.setup' -o -name 'ikiwiki-mass-rebuild' -o -name 'ikiwiki-update-wikilist' -o -name 'gitremotes' -o -name 'mdwn2man' -o -name 'pm_filter' -o -name 'po2wiki' -o -name 'externaldemo' \))
PROBABLE_INST_LIB=$(shell \\
if [ "$(INSTALLDIRS)" = "perl" ]; then \\
ikiwiki.setup:
HOME=/home/me $(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.in -dumpsetup ikiwiki.setup
-extra_build: $(outprogs) ikiwiki.setup docwiki sysconfdir
+extra_build: perl_shebangs $(outprogs) ikiwiki.setup docwiki sysconfdir
./mdwn2man ikiwiki 1 doc/usage.mdwn > ikiwiki.man
./mdwn2man ikiwiki-mass-rebuild 8 doc/ikiwiki-mass-rebuild.mdwn > ikiwiki-mass-rebuild.man
./mdwn2man ikiwiki-makerepo 1 doc/ikiwiki-makerepo.mdwn > ikiwiki-makerepo.man
docwiki:
$(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.in -setup docwiki.setup -refresh
+perl_shebangs:
+ifneq "$(PERL)" "/usr/bin/perl"
+ for file in $(shebang_scripts); do \
+ $(SED) -e "1s|^#!/usr/bin/perl|#!$(PERL)|" < $$file > "$$file.new"; \
+ [ -x $$file ] && chmod +x "$$file.new"; \
+ mv -f "$$file.new" $$file; \
+ done
+endif
+
+perl_shebangs_clean:
+ifneq "$(PERL)" "/usr/bin/perl"
+ for file in $(shebang_scripts); do \
+ $(SED) -e "1s|^#!$(PERL)|#!/usr/bin/perl|" < $$file > "$$file.new"; \
+ [ -x $$file ] && chmod +x "$$file.new"; \
+ mv -f "$$file.new" $$file; \
+ done
+endif
+
sysconfdir:
- $(PERL) -pi -e "s|/etc/ikiwiki|$(SYSCONFDIR)|g" $(sysconfdir_scripts)
+ $(PERL) -pi -e "s|\"/etc/ikiwiki|\"$(SYSCONFDIR)|g" $(sysconfdir_scripts)
-extra_clean:
+extra_clean: perl_shebangs_clean
$(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.in -setup docwiki.setup -clean
rm -f *.man $(outprogs) ikiwiki.setup plugins/*.pyc
$(MAKE) -C po clean
+ikiwiki (3.20141016) unstable; urgency=medium
+
+ [ Joey Hess ]
+ * Fix crash that can occur when only_committed_changes is set and a
+ file is deleted from the underlay.
+
+ [ Simon McVittie ]
+ * core: avoid dangerous use of CGI->param in list context, which led
+ to a security flaw in Bugzilla; as far as we can tell, ikiwiki
+ is not vulnerable to a similar attack, but it's best to be safe
+ * core: new reverse_proxy option prevents ikiwiki from trying to detect
+ how to make self-referential URLs by using the CGI environment variables,
+ for instance when it's deployed behind a HTTP reverse proxy
+ (Closes: #745759)
+ * core: the default User-Agent is now "ikiwiki/$version" to work around
+ ModSecurity rules assuming that only malware uses libwww-perl
+ * core: use protocol-relative URLs (e.g. //www.example.com/wiki) so that
+ https stays on https and http stays on http, particularly if the
+ html5 option is enabled
+ * core: avoid mixed content when a https cgiurl links to http static pages
+ on the same server (the static pages are assumed to be accessible via
+ https too)
+ * core: force the correct top URL in w3mmode
+ * google plugin: Use search form
+ * docwiki: replace Paypal and Flattr buttons with text links
+ * comments: don't record the IP address in the wiki if the user is
+ logged in via passwordauth or httpauth
+ * templates: add ARIA roles to some page elements, if html5 is enabled.
+ Thanks, Patrick
+ * debian: build-depend on libmagickcore-6.q16-2-extra | libmagickcore-extra
+ so we can thumbnail SVGs in the docwiki
+ * debian: explicitly depend and build-depend on libcgi-pm-perl
+ * debian: drop unused python-support dependency
+ * debian: rename debian/link to debian/links so the intended symlinks appear
+ * debian: fix some wrong paths in the copyright file
+
+ -- Simon McVittie <smcv@debian.org> Thu, 16 Oct 2014 23:28:26 +0100
+
ikiwiki (3.20140916) unstable; urgency=low
* Don't double-decode CGI submissions with Encode.pm >= 2.53,
libtimedate-perl, libhtml-template-perl,
libhtml-scrubber-perl, wdg-html-validator,
libhtml-parser-perl, liburi-perl (>= 1.36), perlmagick, po4a (>= 0.34),
- libfile-chdir-perl, libyaml-libyaml-perl, python-support, librpc-xml-perl,
- libcgi-session-perl, ghostscript
+ libfile-chdir-perl, libyaml-libyaml-perl, librpc-xml-perl,
+ libcgi-pm-perl, libcgi-session-perl, ghostscript,
+ libmagickcore-6.q16-2-extra | libmagickcore-extra
Maintainer: Joey Hess <joeyh@debian.org>
Uploaders: Josh Triplett <josh@freedesktop.org>,
Simon McVittie <smcv@debian.org>
libc6-dev | libc-dev,
git (>= 1:1.7) | git-core (>= 1:1.5.0) | subversion | tla | bzr (>= 0.91) | mercurial | monotone (>= 0.38) | darcs,
libxml-simple-perl, libnet-openid-consumer-perl, libcrypt-ssleay-perl,
- liblwpx-paranoidagent-perl, libtimedate-perl,
+ liblwpx-paranoidagent-perl, libtimedate-perl, libcgi-pm-perl,
libcgi-formbuilder-perl (>= 3.05), libcgi-session-perl (>= 4.14-1),
libmail-sendmail-perl, libauthen-passphrase-perl, libterm-readline-gnu-perl,
libgravatar-url-perl, librpc-xml-perl,
License: GPL-2+
Files: templates/* underlays/basewiki/* doc/ikiwiki/directive/*
- ikiwiki.setup po/underlay/*
+ ikiwiki.setup po/underlays/*
Copyright: © 2006-2010 Joey Hess <joey@ikiwiki.info>
License: other
Redistribution and use in source and compiled forms, with or without
Copyright: © 2006 Christian Mock <cm@tahina.priv.at>
License: GPL-2+
-Files: IkiWiki/Plugin/topography.pm
-Copyright: © 2006 Recai Oktaş <roktas@debian.org>
-License: GPL-2+
-
Files: IkiWiki/Plugin/map.pm
Copyright: © 2006 Alessandro Dotti Contra
License: GPL-2+
Smileys were copied from Moin Moin.
Files: doc/smileys/neutral.png
- doc/smileys/question.pn
+ doc/smileys/question.png
Copyright: (c) 2002 phpBB Group
License: GPL-2
These smileys were copied from phpBB.
© 2011 The Dojo Foundation
License: GPL-2
-Files: underlays/attachments/ikiwiki/jquery-ui*
+Files: underlays/attachment/ikiwiki/jquery-ui*
Copyright: © 2008 Paul Bakaus
© 2011 the jQuery UI Authors (http://jqueryui.com/about)
License: GPL-2
-Files: underlays/attachments/ikiwiki/jquery.tmpl*
+Files: underlays/attachment/ikiwiki/jquery.tmpl*
Copyright: © Boris Moore
License: GPL-2
-Files: underlays/attachments/ikiwiki/
+Files: underlays/attachment/ikiwiki/*
Copyright: 2010, 2011 Sebastian Tschan
Comment:
blueimp / jQuery-File-Upload widget,
from https://github.com/blueimp/jQuery-File-Upload
License: Expat
-Files: underlays/themes/blueview/style.css
+Files: themes/blueview/style.css
Copyright: © 2009,2010 Bernd Zeimetz
© 2008 Yahoo! Inc.
Comment:
http://developer.yahoo.com/yui/license.html
License: GPL-2+
-Files: underlays/themes/blueview/*
+Files: themes/blueview/*
Copyright: © 2009,2010 Bernd Zeimetz
License: GPL-2+
-Files: underlays/themes/goldtype/*
+Files: themes/goldtype/*
Copyright: © Lars Wirzenius
License: GPL-2+
-Files: underlays/themes/monochrome/*
+Files: themes/monochrome/*
Copyright: © 2012 Jon Dowland
License: GPL-2+
+++ /dev/null
-usr/share/ikiwiki/examples usr/share/doc/ikiwiki/examples
-usr/share/common-licenses/GPL-2 usr/share/doc/ikiwiki/html/GPL
--- /dev/null
+usr/share/ikiwiki/examples usr/share/doc/ikiwiki/examples
+usr/share/common-licenses/GPL-2 usr/share/doc/ikiwiki/html/GPL
--- /dev/null
+Name: ikiwiki
+Bug-Database: http://ikiwiki.info/bugs/
+Bug-Submit: http://ikiwiki.info/bugs/
+Changelog: http://ikiwiki.info/news/
+Donation: http://ikiwiki.info/tipjar/
--- /dev/null
+This fix is probably just changing
+
+http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=templates/googleform.tmpl;h=9468e062ab19a381f6dadb339480796efae827f5;hb=HEAD#l2
+
+to
+
+ <form method="get" action="//www.google.com/search" id="searchform">
+
+> I changed it to use https unconditionally - there seems little point
+> in doing Google searches in clear-text when Google supports https,
+> even on unencrypted wikis. [[done]] --[[smcv]]
And the extra newlines break the table. Can they be safely removed?
> If you want an HTML table, I would suggest using an HTML table, which
-> should pass through Markdown without being interpreted further:
->
-> <table><tr>
-> \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=tagtd]]
-> </tr></table>
->
-> where tagtd.tmpl is of the form `<td>your markup here</td>`; or even just
+> should pass through Markdown without being interpreted further. To
+> avoid getting the `<div>` inside the `<table>` you can use:
>
> \[[!inline pages="link(/category/env)" feeds=no archive=yes sort=title template=tagtable]]
>
--- /dev/null
+<http://developers.google.com/speed/pagespeed/insights/>
+
+indicates the viewport on mobile needs to be configured, e.g. `<meta name=viewport content="width=device-width, initial-scale=1">` in the header of
+
+http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=templates/page.tmpl;
+
+> This seems a lot like
+> [an "unbreak my application" option](http://ometer.com/free-software-ui.html)
+> but OK... presumably the motivation for this being opt-in is that "most"
+> websites have some sort of hard-coded fixed-width layout suitable for
+> a proportion of desktop browsers, rather than being responsive to window
+> size like they should have been all along. --[[smcv]]
+
+Furthermore:
+
+* fonts need to be tweaked
+
+ > Suggestions?
+ >
+ > (Note that Joey has generally rejected stylistic changes to the default
+ > anti-theme; enhancing the other themes would be OK though.)
+ > --[[smcv]]
+
+* XHTML should be dropped !
+
+ > Already in the to-do list: [[todo/generate HTML5 by default]]. --[[smcv]]
+
+I'm practicing this on http://dabase.com/ with <http://source.dabase.branchable.com/?p=source.git;a=blob;f=templates/page.tmpl;>
+
+> [[!format diff """
+-<TMPL_IF FORCEBASEURL><base href="<TMPL_VAR FORCEBASEURL>" /><TMPL_ELSE>
+-<TMPL_IF BASEURL><base href="<TMPL_VAR BASEURL>" /></TMPL_IF>
+"""]]
+> You probably don't want to delete those. It breaks the CGI. --[[smcv]]
A quick workaround might be to force the use of $config{url} instead of $cgi->url as a base for URLs when w3mmode is set.
-- Martin
+
+> [[Fixed|done]] --[[smcv]]
--- /dev/null
+## What I did
+
+A friend reported this, and I'm seeing it too. With 3.20140916, on
+a system with Python 2.7 and 3.4 (and little else) installed, I
+tried to run the auto.setup:
+
+ :; ikiwiki --setup /etc/pkg/ikiwiki/auto.setup
+ What will the wiki be named? Import Errors
+ What revision control system to use? git
+ Which user (wiki account or openid) will be admin? schmonz
+
+
+ Setting up Import Errors ...
+ Importing /Users/schmonz/ImportErrors into git
+ Initialized empty shared Git repository in /Users/schmonz/ImportErrors.git/
+ Initialized empty Git repository in /Users/schmonz/ImportErrors/.git/
+ [master (root-commit) 20b1128] initial commit
+ 1 file changed, 1 insertion(+)
+ create mode 100644 .gitignore
+ Counting objects: 3, done.
+ Writing objects: 100% (3/3), 230 bytes | 0 bytes/s, done.
+ Total 3 (delta 0), reused 0 (delta 0)
+ To /Users/schmonz/ImportErrors.git
+ * [new branch] master -> master
+ Directory /Users/schmonz/ImportErrors is now a clone of git repository /Users/schmonz/ImportErrors.git
+ Traceback (most recent call last):
+ File "/usr/pkg/lib/ikiwiki/plugins/rst", line 45, in <module>
+ from proxy import IkiWikiProcedureProxy
+ File "/usr/pkg/lib/ikiwiki/plugins/proxy.py", line 41, in <module>
+ import xml.parsers.expat
+ File "/usr/pkg/lib/python3.4/xml/parsers/expat.py", line 4, in <module>
+ from pyexpat import *
+ ImportError: No module named 'pyexpat'
+
+
+ Creating wiki admin schmonz ...
+ Choose a password:
+ [...]
+
+## What I expected
+
+I expected to get a basic site.
+
+## What happened instead
+
+I got a basic site with some Python error messages.
+
+## Likely fix
+
+Looks like `proxy.py` needs the trick from [[!debbug 637604]] so
+that it can defer a few imports (at least `xml.parsers.expat` and
+the XML-RPC libs) until the methods using them are called. --[[schmonz]]
+
+-----
+
+It's more complicated than I thought. Findings and questions so
+far:
+
+### Failing to load an external plugin should be an error
+
+When a typical Perl plugin fails to load (say, by failing to compile),
+`IkiWiki::loadplugin()` throws an exception. For XML-RPC plugins
+written in any language, ikiwiki assumes loading succeeded.
+
+Let's take [[!iki plugins/rst]] as an example. It's written in
+Python and uses `proxy.py` to handle XML-RPC communication with
+ikiwiki. Let's say that `proxy.py` compiles, but `rst` itself
+doesn't. We'd like ikiwiki to know the plugin isn't loaded, and
+we'd like an error message about it (not just the Python errors).
+
+Now let's say `rst` would be fine by itself, but `proxy.py` doesn't
+compile because some of the Python modules it needs are missing
+from the system. (This can't currently happen on Debian, where
+`libpython2.7` includes `pyexpat.so`, but pkgsrc's `python27`
+doesn't; it's in a separate `py-expat` package.) As before, we'd
+like ikiwiki to know `rst` didn't load, but that's trickier when
+the problem lies with the communication mechanism itself.
+
+For the tricky case, what to do? Some ideas:
+
+- Figure out where in `auto.setup` we're enabling `rst` by default,
+ and stop doing that
+- In pkgsrc's `ikiwiki` package, add a dependency on Python and
+ `py-expat` just in case someone wants to enable `rst` or other
+ Python plugins
+
+For the simple case, I've tried the following:
+
+[[!template id=gitbranch branch=schmonz/external-plugin-loading author="[[schmonz]]"]]
+
+- In `IkiWiki::Plugin::external::import()`, capture stderr
+- Before falling off the end of `IkiWiki::Plugin::external::rpc_call()`,
+ if the command had been 'import' and stderr is non-empty, throw
+ an exception
+- In `IkiWiki::loadplugin()`, try/catch/throw just like we do with
+ regular non-external plugins
+
+With these changes, we have a test that fails when an external
+plugin can't be loaded (and passes, less trivially, when it can).
+Huzzah! (I haven't tested yet whether I've otherwise completely
+broken the interface for external plugins. Not-huzzah!) --[[schmonz]]
Editing a wiki page with non-Latin characters using the web interface also fails with the same error.
Additionally, embedding graphviz graphs containing non-Latin characters leads to the same error.
Observed in ikiwiki versions 3.20130904 and 3.20140102
+
+> This is probably [[fixed|done]] in 3.20140916. Please provide more
+> information if not. --[[smcv]]
--- /dev/null
+When IkiWiki uses discount to implement [[plugins/mdwn]] rendering,
+there is a workaround for <https://rt.cpan.org/Ticket/Display.html?id=74016>:
+
+<pre><code>$t=~s/<style/<elyts/ig;
+my $r=Text::Markdown::Discount::markdown($t);
+$r=~s/<elyts/<style/ig;
+</code></pre>
+
+However, this workaround also applies to indented text or text in backticks:
+if you write <code>there is a bug involving the \`<style>\` tag</code>,
+or use indentation like
+
+<pre><code>you can use this markup:
+
+ <style type="text/css">...</style>
+</code></pre>
+
+then that gets turned into `<elyts` in the source before passing through
+`markdown`, comes out as `<elyts` in the output HTML, and is rendered
+as `<elyts` by the browser. This makes it quite difficult to talk about
+HTML stylesheet markup on an IkiWiki instance (I had to use raw HTML in
+this bug report's source to avoid the bug).
+
+I think the side-effect of the workaround is more damaging than the actual bug
+being worked around: I've never wanted to write inline style tags in the body of
+a Markdown page (which isn't even valid HTML) but I have certainly wanted to
+discuss style markup several times. The first couple of times I saw this happen,
+I thought it was some sort of misguided anti-cross-site-scripting filter...
+
+--[[smcv]]
If anyone needs to release ikiwiki in a hurry, please delete that test
and we can put it back later. --[[smcv]]
+
+> [[fixed in 3.20140916|done]] --[[smcv]]
I have produced a patch for this issue, but beware, while it appears to fix the problem for me, I have little understanding of perl and the existing code base.
> It looks sound, but I have yet to test it. --[[anarcat]]
+
+>> I reviewed a version of this (possibly rebased or modified or something)
+>> that was in the [[todo/osm_plugin_GeoJSON_popup_patch]] branch,
+>> over on the todo page for that branch. Feel free to move my
+>> review comments for it here if you want to split the discussion. --[[smcv]]
+>> [[!tag reviewed]]
+
+Here's [[smcv]]'s review from [[todo/osm_plugin_GeoJSON_popup_patch]], annotated with my comments. --[[anarcat]]
+
+> It would be good if the commit added documentation for the new feature,
+> probably in `doc/ikiwiki/directive/osm.mdwn`.
+>
+> + my @layers = [ 'OSM' ];
+>
+> You mean `$layers`. `[]` is a scalar value (a reference to an array);
+> `@something` is an array.
+
+>> Or `@layers = ( 'OSM' );`. --[[anarcat]]
+
+>>> Yeah, and then `layers => [@layers]` or `layers => \@layers`
+>>> to turn it into a reference when building `%options`. --s
+
+> + @layers = [ split(/,/, $params{layers}) ];
+>
+> Is comma-separated the best fit here? Would whitespace, or whitespace and/or
+> commas, work better?
+
+>> Why don't we simply keep it an array as it already is? I fail to see the reason behind that change.
+>>
+>>> This seems to be at least partially a feature request for \[[!osm]]:
+>>> "allow individual \[[!osm]] maps to override `$config{osm_layers}`.
+>>> Items in `%config` can be a reference to an array, so that's fine.
+>>> However, parameters to a [[ikiwiki/directive]] cannot be an array,
+>>> so for the directive, we need a syntax for taking a scalar parameter
+>>> and splitting it into an array - comma-separated, whitespace-separated,
+>>> whatever. --s
+>>
+>> This is the config I use right now on http://reseaulibre.ca/:
+>>
+>> ~~~~
+>> osm_layers:
+>> - http://a.tile.stamen.com/toner/${z}/${x}/${y}.png
+>> - OSM
+>> - GoogleHybrid
+>> ~~~~
+>>
+>> It works fine. At the very least, we should *default* to the configuration set in the the .setup file, so this chunk of the patch should go:
+>>
+>> ~~~~
+>> - $options{'layers'} = $config{osm_layers};
+>> ~~~~
+>>
+>> Maybe the best would be to use `$config{osm_layers};` as a default? --[[anarcat]]
+
+> It's difficult to compare without knowing what the values would look like.
+> What would be valid values? The documentation for `$config{osm_layers}`
+> says "in a syntax acceptable for OpenLayers.Layer.OSM.url parameter" so
+> perhaps:
+>
+> # expected by current branch
+> \[[!osm layers="OSM,WTF,OMG"]]
+> \[[!osm layers="http://example.com/${z}/${x}/${y}.png,http://example.org/tiles/${z}/${x}/${y}.png"]]
+> # current branch would misbehave with this syntax but it could be
+> made to work
+> \[[!osm layers="OSM, WTF, OMG"]]
+> \[[!osm layers="""http://example.com/${z}/${x}/${y}.png,
+> http://example.org/tiles/${z}/${x}/${y}.png"""]]
+> # I would personally suggest whitespace as separator (split(' ', ...))
+> \[[!osm layers="OSM WTF OMG"]]
+> \[[!osm layers="""http://example.com/${z}/${x}/${y}.png
+> http://example.org/tiles/${z}/${x}/${y}.png"""]]
+>
+> If you specify more than one layer, is it like "get tiles from OpenCycleMap
+> server A or B or C as a round-robin", or "draw OpenCycleMap and then overlay
+> county boundaries and then overlay locations of good pubs", or what?
+
+>> Multiple layers support means that the user is shown the first layer by default, but can also choose to flip to another layer. See again http://reseaulibre.ca/ for an example. --[[anarcat]]
+
+> + layers => @layers,
+>
+> If @layers didn't have exactly one item, this would mess up argument-parsing;
+> but it has exactly one item (a reference to an array), so it works.
+> Again, if you replace @layers with $layers throughout, that would be better.
+>
+> - $options{'layers'} = $config{osm_layers};
+>
+> Shouldn't the default if no `$params{layers}` are given be this, rather
+> than a hard-coded `['OSM']`?
+
+>> Agreed. --[[anarcat]]
+
+> `getsetup()` says `osm_layers` is `safe => 0`, which approximately means
+> "don't put this in the web UI, changing it could lead to a security flaw
+> or an unusable website". Is that wrong? If it is indeed unsafe, then
+> I would expect changing the same thing via \[[!osm]] parameters to be
+> unsafe too.
+
+>> I put that at `safe=>0` as a security precaution, because I didn't
+>> exactly know what that setting did.
+>>
+>> It is unclear to me whether this could lead to a security flaw. The
+>> osm_layers parameter, in particular, simply decides which tiles get
+>> loaded in OpenLayers, but it is unclear to me if this is safe to change
+>> or not. --[[anarcat]]
+
+> I notice that `example => { 'OSM', 'GoogleSatellite' }` is wrong:
+> it should (probably) be `example => [ 'OSM', 'GoogleSatellite' ]`
+> (a list of two example values, not a map with key 'OSM' corresponding
+> to value 'GoogleSatellite'. That might be why you're having trouble
+> with this.
+
+>> That is an accurate statement.
+>>
+>> This is old code, so my memory may be cold, but i think that the "layers" parameters used to be a hash, not an array, until two years ago (commit 636e04a). The javascript code certainly expects an array right now. --[[anarcat]]
+
+>>> OK, then I think this might be a mixture of a bug and a feature request:
+>>>
+>>> * bug: the configuration suggested by the example (or the default when
+>>> unconfigured, or something) produces "TypeError: mapProjection is null"
+>>>
+>>> * feature request: per-\[[!osm]] configuration to complement the
+>>> per-wiki configuration
+>>>
+>>> --s
+>>>
+>>>> That is correct. --[[anarcat]]
>> I suppose what I would like would be to not need to use a `<base href>` in searching at all.
>> --[[KathrynAndersen]]
+
+>>> `<base href>` is *not* required to be absolute in HTML5, so when
+>>> `html5: 1` is used, I've changed it to be host-relative in most cases.
+>>> I think that at least partially addresses this bug report,
+>>> particularly if we [[todo/generate HTML5 by default]] like I've suggested.
+>>>
+>>> The `<base>` is there so we can avoid having to compute how to
+>>> get to (the virtual directory containing) the root of the wiki from
+>>> `ikiwiki.cgi`, which might well be somewhere odd like `/cgi-bin/`.
+>>> I think there are probably other things that it fixes or simplifies.
+>>> --[[smcv]]
available to do consulting or other work on ikiwiki.
* [[Joey]] wrote ikiwiki. He is available for consulting on a part-time basis.
+* [[Amitai Schlair]] (a.k.a. [[schmonz]]) wrote [[rcs/cvs]],
+ [[plugins/rsync]], and [[todo/fancypodcast]], among other things.
+ Contact him via [his website](http://www.schmonz.com/).
Feel free to add yourself to this list.
--- /dev/null
+In [[plugins/write|plugins/write]] I am told that
+
+> The `%destsources` hash records the name of the source file used to
+> create each destination file. The key is the output filename (ie,
+> "foo/index.html"), and the value is the source filename that it was built
+> from (eg, "foo.mdwn").
+
+but that's not what I see happening. I see a hash that maps the output
+filename (eg., "foo/index.html") to a source _page_ name (eg., "foo").
+
+I need an additional visit to `%pagesources` to get from "foo"
+to "foo.mdwn".
+
+It seems to be consistent and happily working that way, so maybe the
+description in [[plugins/write]] is the only mistake. Have I missed something?
+
+-- [[jcflack]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 1"
+ date="2014-09-23T07:55:24Z"
+ content="""
+You were correct, thanks. I've fixed it.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 2"
+ date="2014-10-05T22:56:56Z"
+ content="""
+In git master, if `$config{html5} = 1` then the `<base>` URL
+will usually be host-relative or protocol-relative (`/wiki/` or
+`//example.com/wiki/`) which reduces the need for that option.
+
+This is not yet available in a release, and is still subject to
+change.
+
+I still don't know what your wiki's configuration is, because you
+never told us the settings I asked for (`cgiurl` and `url`), so
+I don't know whether this will help you.
+"""]]
--- /dev/null
+From the latest homebrew, it prompts the following search result:
+
+ $ brew search ikiwiki
+ No formula found for "ikiwiki".
+ Searching pull requests...
+ Closed pull requests:
+ Add ikiwiki (https://github.com/Homebrew/homebrew/pull/5355)
+ Add ikiwiki formula (https://github.com/Homebrew/homebrew/pull/5358)
+
+Reading the messages on those two GitHub links, it seems the request to add Ikiwiki to Homebrew was rejected for having too many Perl dependencies, but a "HomeBrew-Alt" is possible. Does anyone know if Ikiwiki is added to this "HomeBrew-Alt"? How to install Ikiwiki on Mac OS X using "Homebrew-Alt"? I'm desperate to get Ikiwiki to work on my Mac computers.
--- /dev/null
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlcaGfdn9Kye1Gc8aGb67PDVQW4mKbQD7E"
+ nickname="Amitai"
+ subject="comment 1"
+ date="2014-10-14T22:41:59Z"
+ content="""
+I don't use Homebrew and can't speak for it, but have you tried the suggestion in [[tips/ikiwiki on mac os x]]?
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="openmedi"
+ ip="91.65.196.164"
+ subject="comment 2"
+ date="2014-10-15T12:33:28Z"
+ content="""
+I second that request. Although it is possible to install ikiwiki like amitai suggests, it would be a great convenience to be able to just \"brew install ikiwiki\".
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlcaGfdn9Kye1Gc8aGb67PDVQW4mKbQD7E"
+ nickname="Amitai"
+ subject="comment 3"
+ date="2014-10-15T13:43:24Z"
+ content="""
+Maybe someone reading this is able to act on your request. In case that's not true, I'd suggest investigating a few questions on the Homebrew side of things:
+
+7. From Homebrew's 2011 point of view, why did flangy consider [\"large number of perl dependencies\"](https://github.com/Homebrew/homebrew/pull/5358) to be a basis for rejection?
+7. From Homebrew's 2014 point of view, is that rationale still considered valid?
+7. If so, then does Homebrew make it easy for users to install formulae from repositories other than the official one?
+7. If so, then is there an existing non-official repository that either contains an ikiwiki formula or would be willing to accept one?
+
+Since I already use pkgsrc for all my packages (not only ikiwiki) on all my systems (not only OS X), I'm unmotivated to pursue this line of inquiry for possibly zero benefit. If you're already invested in Homebrew, and happy about it, then perhaps it's worth it to you to get this figured out.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="openmedi"
+ ip="91.65.196.164"
+ subject="comment 4"
+ date="2014-10-15T18:49:16Z"
+ content="""
+I asked the homebrew people on freenode. Here's what I got:
+
+My Question: (Hopefully) quick question: There has been an interest in having a brew formula for the static wiki generator ikiwiki (see http://ikiwiki.info/forum/Can_someone_add_Ikiwiki_in_Homebrew__63__/) in 2011 a pull request for that (https://github.com/Homebrew/homebrew/pull/5358) was closed due to too many perl dependencies. It was suggested that one use homebrew-alt instead. Since homebrew-alt doesn’t seem to exist (anymore), I was curious what one can do to move this request forward.
+
+Answer: You can always host a formula yourself without having it accepted into homebrew core, using the tap mechanism. We have some formulas for packages written in Python where we explicitly fetch and install the dependencies into the parent formula's prefix; check out ansible.rb for an example. If you can do something like that for ikiwiki's dependencies, that would probably go through.
+
+I'll look into it, but have to admit, that I have very little time right now (and am also not sure, if I am able to produce a working brew formula…). But maybe somebody else now has enough info to get started.
+"""]]
--- /dev/null
+I try to merge my existing blog with my wiki. I just started the process and ran into a problem:
+
+I created a blog in my ikiwiki install and wanted to import my blogposts, that are just a bunch of (octopress) text files. Of course, ikiwiki can read them, etc. Here's my problem: When adding an old post to git, the imported blog article in my wiki is shown to be posted today. This is not the desired behaviour, since I have published this article a while before. The only way to change this I found was to fiddle around with the commit where I added the post. A way to this is described in [this stackoverflow answer](http://stackoverflow.com/a/454750).
+
+This works. Well, almost.
+
+For some weird reason the "posted" time is off one hour. Let's say, I published an article Fri Mar 2 01:30:00 2012. I corrected the commit as outlined by the link I provided. Ikiwiki will show that the article got posted at Fri Mar 2 00:30:00 2012. The only reason I can think of, that could produce this error, is DST. Has anyone an idea how to correct this error? Did I do something wrong or did I overlook something?
+
+Any help is appreciated!
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 1"
+ date="2014-09-24T07:27:38Z"
+ content="""
+It does sound as though you have some sort of DST issue going on.
+Did you specify the time zone taking into account DST
+(e.g. if you are in USA Eastern time (UTC-05:00) and you wrote
+a blog post in summer, you'll want to use -0400)?
+
+You don't need to alter the git commit dates, you can use
+something like \[[!meta date=\"2014-09-24 08:26:05+0100\"]]
+which takes precedence over the commit date from git.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="openmedi"
+ ip="141.23.120.160"
+ subject="comment 2"
+ date="2014-09-24T13:19:58Z"
+ content="""
+Thank you for pointing out the meta directive to me! This was exactly what I was looking for. :) Also, you were right, I didn't use the right offset. Everything works now.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 2"
+ date="2014-10-05T22:55:01Z"
+ content="""
+I have added a new `$config{reverse_proxy}` option in git master
+which applies the necessary hard-coding.
+
+Also in git master, if `$config{html5} = 1` then the `<base>` URL
+will usually be host-relative or protocol-relative (`/wiki/` or
+`//example.com/wiki/`) which reduces the need for that option.
+
+These are not yet available in a release, and are still subject to
+change.
+"""]]
--- /dev/null
+Hey everyone, I have the following problem: I am writing a German Wiki and therefore use umlauts (ä,ö,ü…) quite often in my writing. When I am not using the browser for editing the wiki (or writing comments) everything is fine. Since I want other people to be able to post comments or use the discussion pages I need to get the encoding to work. Here is what is happening when I'm trying to edit a discussion page:
+
+* Writing the comment: ![](http://f.cl.ly/items/2F3u36261z2N141T343E/Screen%20Shot%202014-10-12%20at%2017.54.06.png)
+* Checking the comment via the "preview": ![](http://f.cl.ly/items/3O1c2G011u2x0E2s0o3q/Screen%20Shot%202014-10-12%20at%2017.54.17.png)
+* Pressing "cancel" since umlauts don't work: ![](http://f.cl.ly/items/141P2M1v323g1J2H3220/Screen%20Shot%202014-10-12%20at%2017.54.30.png)
+
+As I said, when I'm not posting from the browser everything works fine. Here's what I've checked/done to prevent the encoding error:
+
+* I put "export LANG=de_DE.UTF-8" and "export LANGUAGE=de_DE.UTF-8" in my .bashrc
+* I set "locale:" to "de_DE.UTF-8" in my ikiwiki .setup-File
+
+What else could there be wrong? What else could I try to solve the problem?
+
+Any ideas are appreciated! Thanks in advance!
--- /dev/null
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlcaGfdn9Kye1Gc8aGb67PDVQW4mKbQD7E"
+ nickname="Amitai"
+ subject="fixed in a recent release, I think"
+ date="2014-10-12T16:40:17Z"
+ content="""
+What version of ikiwiki are you running? I believe this was fixed in [[news/version 3.20140916]], with the patch from [[bugs/garbled non-ascii characters in body in web interface]].
+
+Related reading:
+
+- [[forum/\"Error: cannot decode string with wide characters\" on Mageia Linux x86-64 Cauldron]]
+- [[forum/build error: Cannot decode string with wide characters]]
+- [[todo/should use a standard encoding for utf chars in filenames]]
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="spalax"
+ ip="82.233.196.200"
+ subject="Plugin compile"
+ date="2014-10-04T10:37:16Z"
+ content="""
+Hello,
+I enventually wrote a plugin that might fit your need: [[plugins/contrib/compile]].
+
+In the setup file, you specify which command is to be applied to files. For instance, to convert `odt` files to `pdf`, you can use:
+
+    compile_filetypes = '{
+            \"odt\": {
+                \"build\": \"libreoffice --headless --convert-to pdf %{srcname}s\",
+                \"destname\": \"%{basename}s.pdf\"
+            }
+    }'
+
+Then, in your wiki pages, you can use `\[[!compile files=\"foo.odt\"]]`. This will convert file to pdf, and render as a link to the `pdf` file. If option `inline` is set, you can also simply use a wikilink `\[[foo.odt]]`, which will have the same effect.
+
+The only problem I see is that when linking several times to the same file, it will be compiled several times. I marked it as [[a feature request|http://atelier.gresille.org/issues/420]] to the plugin.
+
+Regards,
+-- [[Louis|spalax]]
+
+"""]]
--- /dev/null
+Hello,
+
+I've setup authentication on my ikiwiki website using httpauth plugin. I've also disabled anonok, openid and passwordauth so that httpauth is the unique authentication method. I've configured the `cgiauthurl` to https://example.com/auth/ikiwiki.cgi in order to make the authentication more secured (password is never sent in clear). My `url` points to http://example.com/ and my `cgiurl` points to http://example.com/ikiwiki.cgi .
+
+When I try to edit a page accessed by http, everything works fine: there is a redirection to https://example.com/auth/ikiwiki.cgi (defined in `cgiauthurl`) and my browser launches an HTTP Basic Authentication login form. But when I try to edit a page accessed by https there is no redirection to the `cgiauthurl` url. Instead, I can edit (and save) the page without authentication. I've tried this with a fresh new browser session where I have never been asked for authentication before. It seems that editing pages directly from https://example.com/ikiwiki.cgi?page=page&do=edit works without authentication...
+
+I think that the Ikiwiki CGI do not redirect to `cgiauthurl` when it is accessed by HTTPS.
--- /dev/null
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlcaGfdn9Kye1Gc8aGb67PDVQW4mKbQD7E"
+ nickname="Amitai"
+ subject="comment 1"
+ date="2014-10-14T22:25:13Z"
+ content="""
+I have a site like this and can't reproduce the bug. What version of ikiwiki are you running? Can you post your ikiwiki.setup, and perhaps also your web server configuration?
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 2"
+ date="2014-10-15T23:26:52Z"
+ content="""
+I can't reproduce this either.
+
+Do you perhaps still have an ikiwiki login session cookie stored in your browser
+from when you previously used passwordauth or openid?
+(In Firefox: Edit->Preferences, Privacy tab, Show Cookies.)
+
+The login sessions that are considered to be valid are stored in `.ikiwiki/sessions.db`
+in your wiki's `srcdir`.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 3"
+ date="2014-10-15T23:30:21Z"
+ content="""
+It might also be interesting to visit your wiki's preferences page
+(`ikiwiki.cgi?do=prefs`) which should tell you who you are logged-in as.
+If you \"view source\" it will also show you your session ID, which should match
+what's in the `ikiwiki_session_something` cookie.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawk8U772S3jDrZJCO0WA5WaDLjJv5mMl6Yw"
+ nickname="Nadine"
+ subject="It was an Apache problem..."
+ date="2014-10-16T14:57:26Z"
+ content="""
+Hello,
+
+thank you for your comments. The problem comes from the Apache configuration. I use a git-http-backend on this server and I affect the content of the REMOTE_USER environment variable like this:
+
+ SetEnv REMOTE_USER=$REDIRECT_REMOVE_USER
+
+Ikiwiki CGI seems to use this variable to determine who the current user is. Even if the variable content is NULL, ikiwiki.cgi uses it.
+
+I just changed this to:
+
+ SetEnvIf Request_URI \"^/git/\" REMOTE_USER=$REDIRECT_REMOVE_USER
+
+and everything runs Ok now...
+
+Sorry for bothering Ikiwikiboard with an HTTP server problem.
+
+"""]]
Server has an older ikiwiki installed but I'd like to use a newer version from git, and I don't have root access.
> You can't set `PERL5LIB` in `ENV` in a setup file, because ikiwiki is already
-> running before it reads that, and so it has little effect. Your error
+> running before it reads that, and so it has little effect.
+>> That's [fixed now](http://source.ikiwiki.branchable.com/?p=source.git;a=commitdiff;h=9d928bd69496648cd7a2d4542a2d533992c01757;hp=f574bc2ed470b60f576a2906998bc7c129f2f983)
+>> for anything invoked through the generated wrappers: they put all the setup `ENV`
+>> values into the real environment before starting Perl. (When running `ikiwiki` at the
+>> command line, `PERL5LIB` just has to be in the environment, as it would normally be.)
+>
+> Your error
> messages do look like a new bin/ikiwiki is using an old version of
> `IkiWiki.pm`.
>
--- /dev/null
+A while ago I added RTL text support to my wiki:
+
+<http://ikiwiki.info/tips/Right-to-left___40__RTL__41___page_text>
+
+But this support does not work with PO files. When I write a page in
+English, I need the Hebrew/Arabic translation to have additional text
+(in my case, using the template directive) which causes the direction of the
+text to be RTL.
+
+I saw a recent patch which claims to solve the problem by exposing the
+language code and direction to the templates (which would help a lot), but
+when I go to the original website from which it came, it looks like the Arabic
+text is still aligned LTR just like English:
+
+<http://addons.nvda-project.org/index.ar.html>
+
+Another issue is that I use Debian stable, and I'm not sure it's safe to
+use some unstable ikiwiki (currently I use the version from backports) -
+advice welcome :-)
+
+It's still important to have the ability to change direction inside the page,
+but the default direction specified either in CSS on in the page.tmpl file
+should be dynamic. I didn't check how the PO plugin works, but it may be
+necessary to update there, because if all it does is copy the HTML page and
+switch strings with translations, it must be modified to also edit the
+LTR/RTL directives so that different translations of the same page can have
+different directions.
+
+I hope I'll have some time to look into it myself, I'm just a bit behind now
+with non-recent ikiwiki version (maybe it's time for me to try sid or from
+source).
+
+--[[fr33domlover]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 11"
+ date="2014-09-21T12:50:11Z"
+ content="""
+> Yes, [a RTL meta tag] would help. But I'd need the PO plugin to
+> respect this lang tag. Is this already possible?
+
+Do you need the po plugin at all? The po plugin is specifically for
+sites that are written in a master language (usually English) and
+then translated into a bunch of other languages - the same general
+approach as <https://www.debian.org/> (that site does not use IkiWiki
+but the idea is the same).
+
+If you're selecting languages in some other way -
+e.g. all your content is in Arabic except that the `/programming/`
+subtree is in English, or something like that - then the po
+plugin is not designed for what you're doing, and adding support
+for a new meta tag to the meta plugin would be a better way to
+get the language code into the header. It could use the same
+`TMPL_VAR` hooks in page.tmpl that po does? I'd review a patch.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 11"
+ date="2014-09-21T13:06:24Z"
+ content="""
+> Maybe if you could set dir to \"flip\" instead of \"rtl\" and \"ltr\"
+
+This feature does not exist in HTML. The allowed values for dir
+are rtl, ltr, and auto (where auto means \"please guess based on the
+content\" - see the HTML5 spec for the exact algorithm used).
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="next steps"
+ date="2014-09-17T08:09:50Z"
+ content="""
+HTML5 says:
+
+> Authors are strongly encouraged to use the dir attribute to indicate text direction rather than using CSS, since that way their documents will continue to render correctly even in the absence of CSS (e.g. as interpreted by search engines).
+
+Could you test whether your tip works with `<div dir=\"rtl\">` or something,
+please? If it does, please change the tip, if not, we'll have to look at
+whether the [[plugins/htmlscrubber]] is getting in the way.
+
+After that, I think the next step towards good RTL support would be to
+put together some test-cases for things that are meant to work, in the
+form of:
+
+* self-contained source code and setup file for a very simple wiki
+* the pages in that wiki making it clear what their intended text
+ direction is (e.g. \"this paragraph should be right to left\")
+
+As far as I know, none of the IkiWiki committers can read any RTL
+languages, so if you use Arabic or Hebrew or whatever in those
+test-cases, we'll need a screenshot/image of what it's meant to look
+like. Using Latin text marked as RTL (so it should come out backwards
+if everything is working correctly) might be easier.
+
+The obvious cases that I can think of are:
+
+* the wiki is \"mostly\" in a RTL language
+* the master language is LTR but the [[plugins/po]] plugin
+ provides a translation into a RTL language
+
+and possibly
+
+* the master language is RTL but the [[plugins/po]] plugin
+ provides a translation into a LTR language
+
+It might be necessary to add support for a per-wiki, per-page or
+(for po) per-translation-language direction override that would set
+the `<html dir>` attribute, but we should find test-cases first, then we
+can work out solutions.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 2"
+ date="2014-09-17T08:19:38Z"
+ content="""
+If I'm interpreting that Arabic website correctly, it *is* RTL, but
+left-justified (which is a somewhat confusing CSS glitch, but hopefully
+not a barrier to understanding by people who can read Arabic). English
+words embedded in the Arabic are LTR, but my understanding of the bidi
+algorithm is that that's meant to happen.
+
+For instance, in the English version, the last paragraph before the inline says:
+
+> Please feel free to subscribe to the rss or atom feeds to be informed on when new addons or a new version of an addon is made available. The following community supported addons are available:
+
+and in the Arabic version, the last paragraph looks like this in my browser
+(where `*****` represents Arabic that I don't know how to read):
+
+ : ***** (... lots more ....) ***** atom feeds * rss **** ****
+
+So that looks right for RTL: the colon is at the end (left), and the
+mentions of rss feeds and atom feeds are at the beginning (right).
+When I \"view source\", it's the other way round.
+
+Also, the page source says:
+
+ <html xmlns=\"http://www.w3.org/1999/xhtml\" lang=\"ar\" xml:lang=\"ar\" dir=\"rtl\">
+
+which looks right?
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 3"
+ date="2014-09-17T08:29:48Z"
+ content="""
+> I saw a recent patch which claims to solve the problem by exposing the language code and direction to the templates
+
+It looks as though you mean [[mhameed]]'s change from
+[[todo/expose_html_language_and_direction]], which exposed them to the
+templates, but did not modify the default `page.tmpl` to make use
+of them. Perhaps you or mhameed could provide a `page.tmpl` patch?
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="fr33domlover"
+ ip="46.117.109.179"
+ subject="comment 4"
+ date="2014-09-17T11:22:57Z"
+ content="""
+> Could you test whether your tip works with \<div dir=\"rtl\"> or something, please?
+
+Sure, I will check that soon. I think it does, I just tried here in ikiwiki. Just curious, why is
+div preferred? IIRC I use \"class\" there after looking at some existing templates. But
+I'm not an expert, especially not in CSS. Would that be used as an HTML4 parallel of the dir attribute?
+
+As to that website with the patch, the problem is that the text is aligned to the left. When
+I type Hebrew in an LTR page, it already shows more or less correctly - English words are
+shown in correct letter order thanks to the bidi algorithm. The issue seems to be aligning
+to the right - that is what my tip does. Maybe the direction setting in the CSS also has other
+effects - I just know it works :-)
+
+I'll happily help with the tests. I also have a test page on my wiki which uses many ikiwiki
+features, to demonstrate how they all look in RTL. Test case ideas:
+
+- Page in RTL (e.g. Arabic) with an LTR paragraph (e.g. English)
+- Page in RTL with LTR paragraph in the same language (e.g. fancy way to write a poem)
+- Page in LTR (e.g. English) with an RTL paragraph (e.g. Hebrew)
+- Page in LTR with RTL paragraph in the same language (poem again)
+- Translated page - master language is LTR, slave is RTL
+- Translated page - master language is RTL, slave is LTR
+- Master LTR page has RTL paragraph, all slaves have it RTL too regardless of their global direction
+- Master RTL page has LTR paragraph, all slaves have it LTR too regardless of their global direction
+
+An example for the last 2 tests is an English master page about linguistics which has a paragraph in some
+RTL language that is being studied, and all slave pages must keep that paragraph intact - both the
+text itself and its RTL direction. But the rest of the page can be translated and correctly made RTL when
+translated to RTL languages.
+
+This gives me another idea - most of the time what you actually mean is to reverse the direction: RTL
+becomes LTR and vice versa. When writing some fancy poem, that's what you probably want. But in the
+previous example, the direction should not be reversed - so there should maybe be two kinds of direction
+modifiers:
+
+1. Dynamic (the default) - You write e.g. a master page in LTR and some RTL paragraphs. an RTL translation
+ automatically reverses directions, RTL <=> LTR.
+2. Fixed - this is like my tip, e.g. An RTL paragraph in an LTR page has a fixed direction set, which is kept even in
+ translations for RTL languages - the page in general is reversed, but that paragraph is not.
+
+Another very useful thing (at least to me) would be an option to have different wiki pages/section with
+different master languages. I have sections in English and sections in Hebrew, which makes the PO
+plugin a problem to use, unless I keep one of these sections untranslated.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 5"
+ date="2014-09-17T11:35:07Z"
+ content="""
+`<div>` is not specifically preferred, any block-level element will do
+(e.g. `<p>`); but `<div>` is something you can wrap around any block,
+so it's good for a generic `\[[!template]]`.
+
+The difference between the use of a `dir` attribute and the use
+of a `class` attribute is that `dir` has a spec-defined semantic
+meaning in HTML4 and HTML5: search engines can look at
+`<div dir=\"rtl\">` and know that it is definitely right-to-left.
+
+`<div class=\"rtl\">` *might* mean right-to-left, but it could equally
+well mean (for instance) documentation about a run-time library,
+or something; classes have no built-in semantic meaning that generic
+user-agents like browsers and search engines can rely on.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawlcaGfdn9Kye1Gc8aGb67PDVQW4mKbQD7E"
+ nickname="Amitai"
+ subject="comment 6"
+ date="2014-09-17T14:24:38Z"
+ content="""
+smcv wrote:
+
+> As far as I know, none of the IkiWiki committers can read any RTL languages
+
+I read Hebrew well enough to detect chirality errors (e.g., L-Hebrew in an R-Hebrew universe). --[[schmonz]]
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 7"
+ date="2014-09-17T15:52:36Z"
+ content="""
+LTR with embedded RTL, or vice versa, sounds like a job for
+the [[tips/Right-to-left___40__RTL__41___page_text]] tip or
+something very similar.
+
+> Maybe the direction setting in the CSS also has other effects
+
+https://html.spec.whatwg.org/#the-dir-attribute suggests that the
+`dir` attribute is meant to be sufficient, but perhaps it's overridden
+by an explict `text-align: left`?
+
+> most of the time what you actually mean is to reverse the direction:
+> RTL becomes LTR and vice versa
+
+I don't think \"I know I am switching between English and Arabic,
+but I don't know which one I'm currently writing\" is a major use-case :-)
+
+> an option to have different wiki pages/section with different master
+> languages
+
+It sounds as though the po plugin is not really what you want, and
+you'd be better off with being able to write
+`\[[!meta lang=ar dir=rtl]]` or something.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="fr33domlover"
+ ip="194.90.37.82"
+ subject="comment 8"
+ date="2014-09-20T12:50:13Z"
+ content="""
+> I don't think \"I know I am switching between English and Arabic, but I don't know which one I'm currently writing\" is a major use-case
+
+Sure you need to know, but there's a difference between \"make the whole text RTL\" or \"make the LTR text RTL and make the RTL text LTR\".
+It depends on what the user means.
+
+> It sounds as though the po plugin is not really what you want, and you'd be better off with being able to write \[[!meta lang=ar dir=rtl]] or something.
+
+Yes, that would help. But I'd need the PO plugin to respect this lang tag. Is this already possible?
+
+> Could you test whether your tip works with <div dir=\"rtl\"> or something, please?
+
+I did and it works, but there's an issue: In both ways - my CSS and the dir attribute - some things don't work right, for example, right-aligned floating
+boxes need to be made left-aligned. How do I handle this with dir? I don't know CSS, but I think it may be possible to define CSS for various page
+elements differently when in the \".rtl\" class - am I right? Another problem is that the horizontal bars of polls (poll plugin) still go LTR. Is there a way
+to fix these things without CSS class? With the notebox template, the right-alignment uses CSS so if we want to use 'dir' it needs to be changed
+to something non-CSS with correct semantics. Maybe if you could set dir to \"flip\" instead of \"rtl\" and \"ltr\", to mean \"the direction opposite to the
+page's direction\". And that new 'dir' would also need to control alignment, since right now notebox is not affected by the dir like I said. The text inside
+does become RTL but the box is still on the right like in LTR.
+
+I don't see other issues but there are plugins I didn't try, e.g. does the box generated by pagetstats directive align to the left in RTL?
+
+If CSS isn't needed for this I'll update the tip, otherwise I should probably fix these issues (notebox-rtl and poll) and add this to the CSS in
+the tip, until there is a better solution (and even then, people will be running previous ikiwiki versions without the solution).
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 9"
+ date="2014-09-21T12:41:56Z"
+ content="""
+[Authoring HTML: Handling Right-to-left Scripts](http://www.w3.org/TR/i18n-html-tech-bidi/)
+might be useful reading.
+
+In [modern browsers](http://caniuse.com/css-sel2) (i.e. not IE6) it
+should be possible to make CSS conditional on any attribute, not
+just class, so you could maybe do something like this:
+
+ /* ikiwiki's existing CSS */
+ .sidebar {
+ float: right;
+ margin-left: 4px;
+ }
+
+ /* new */
+ html[dir=\"rtl\"] .sidebar {
+ float: left;
+ margin-left: 0px;
+ margin-right: 4px;
+ }
+
+If you contributed a patch for `style.css` to make markup like this
+\"just work\", I'd be happy to review it. (`notebox` could use the
+same technique).
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="fr33domlover"
+ ip="46.117.109.179"
+ subject="comment 2"
+ date="2014-09-17T06:57:41Z"
+ content="""
+I couldn't figure out how to make a comment on the commandline so I made this:
+
+[[forum/PO_and_RTL_support]]
+
+The Arabic pages on your wiki seem to have the Arabic in LTR, instead of the intended
+RTL. The reason may be that the PO plugin does not generate each slave page from scratch,
+but rather uses the original page, which causes slave pages to have language 'en' and direction
+LTR. I didn't verify this yet. If you do check this, please share results here :)
+
+What I got to work so far is RTL chunks inside LTR pages. It doesn't replace the PO plugin but
+it can be used to make PO+RTL work:
+
+[Right-to-left (RTL) page text](http://ikiwiki.info/tips/Right-to-left___40__RTL__41___page_text)
+"""]]
--- /dev/null
+I'm working on consolidating my blog and wiki with ikiwiki. I have the following question: Is it possible to serve a blog under a different subdomain? For example: URL of the wiki: ```wiki.example.com``` and I would like to be able to reach the blog under the URL ```blog.example.com```. The permalink structure right now looks like this: ```wiki.example.com/blog/post/``` is it possible to rewrite it so it is served as ```blog.example.com/post/```? I don't even know if this is a question for the ikiwiki forum, but I need to start somewhere.
+
+Thanks in advance for any ideas on how to accomplish that!
--- /dev/null
+[[!comment format=mdwn
+ username="spalax"
+ ip="82.233.196.200"
+ subject="Several .setup files"
+ date="2014-09-27T06:18:29Z"
+ content="""
+I am not sure to have understood your question, but I assume the following:
+
+- your source wiki contains some subdirectory `blog`;
+- you want the whole wiki to be served as [[wiki.example.com]]
+- you want the `blog` subdirectory served as [[blog.example.com]]
+
+If so, what you can do is having two different setup files.
+
+- the first one would contain (using the old setup file format, but you can adapt it for the new one):
+
+ srcdir => \"/path/to/your/source/wiki/\",
+ url => \"http://wiki.example.com\",
+
+- and the second one would contain:
+
+ srcdir => \"/path/to/your/source/wiki/blog\",
+ url => \"http://blog.example.com\",
+
+I hope I answered your question.
+
+-- [[Louis|spalax]]
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="spalax"
+ ip="82.233.196.200"
+ subject="Apache redirection"
+ date="2014-09-27T06:20:09Z"
+ content="""
+I think you can also (assuming you are using Apache2, and having some control over it) make apache redirect [[blog.example.com]] to [[wiki.example.com/blog]].
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ ip="81.100.115.242"
+ subject="comment 6"
+ date="2014-10-05T22:54:06Z"
+ content="""
+> One way to solve this would be a new `$config{hard_code_urls}` option
+
+I have added basically this in git master. It isn't in a release yet,
+and I renamed it to `$config{reverse_proxy}`.
+
+Also in git master, if `$config{html5} = 1` then the `<base>` URL
+will usually be host-relative or protocol-relative (`/wiki/` or
+`//example.com/wiki/`) which reduces the need for that option.
+
+These are still subject to change, for now.
+"""]]
--- /dev/null
+Ok, I'm trying to get into a good routine where I keep ikiwiki updated for my site.
+The plan is to keep a local (partial) git clone, and rebuild from tagged release commits.
+
+I started out with 3.20140831, more or less like this:
+
+ $ git clone ...
+ $ git checkout 3.20140831
+ $ perl Makefile.PL PREFIX=$HOME/blah
+ $ make
+ $ make install
+ $ cd ~/my-wiki-admin-stuff
+ $ ~/blah/bin/ikiwiki --dumpsetup=my.wiki.setup
+ $ vi my.wiki.setup
+ $ ~/blah/bin/ikiwiki --setup my.wiki.setup
+
+So far so good. Fast-forward (heh) to 3.20140916:
+
+ $ git fetch ...
+ $ git merge --ff-only FETCH_HEAD
+ $ git checkout 3.20140916
+ $ perl Makefile.PL ... ; make ; make install
+ $ cd ~/my-wiki-admin-stuff ; ~/blah/bin/ikiwiki --setup my.wiki.setup
+
+This happened to work, but in general if the new ikiwiki version had new
+setup options I needed to know about, I wouldn't necessarily find out, and
+I could be running the new version with something important taking an
+inappropriate default because I didn't add it to the setup file.
+
+What I'm looking for is some sort of
+
+ ikiwiki --read-my-current-setup-file-and-write-one-with-the-same-config-but-with-new-options-shown-and-commented
+
+command.
+
+Will `ikiwiki --changesetup setupfile` do what I'm looking for?
+
+I'm hoping for something simple that takes care of the way the setup sections are conditional
+(so `--dumpsetup` doesn't even show you `git` options unless it knows `rcs=git`, so ideally it
+would look in the current setup to learn what to dump).
+
+How are other folks handling this routinely?
--- /dev/null
+could we get email notifications going here? [[plugins/notifyemail]] is installed on the git-annex wiki, and it works really well to followup on the pages i participate in... thanks! --[[anarcat]]
* [[intrigeri]] `git://gaffer.ptitcanardnoir.org/ikiwiki.git`
* [[gmcmanus]] `git://github.com/gmcmanus/ikiwiki.git`
* [[jelmer]] `git://git.samba.org/jelmer/ikiwiki.git`
-* [[hendry]] `git://webconverger.org/git/ikiwiki`
* [[jon]] `git://github.com/jmtd/ikiwiki.git`
* [[ikipostal|DavidBremner]] `git://pivot.cs.unb.ca/git/ikipostal.git`
* [[ikimailbox|DavidBremner]] `git://pivot.cs.unb.ca/git/ikimailbox.git`
* [[pelle]] `git://github.com/hemmop/ikiwiki.git`
* [[chrismgray]] `git://github.com/chrismgray/ikiwiki.git`
* [[ttw]] `git://github.com/ttw/ikiwiki.git`
-* [[anarcat]] `git://src.anarcat.ath.cx/ikiwiki`
+* [[anarcat]] `git://src.anarc.at/ikiwiki`
* anderbubble `git://civilfritz.net/ikiwiki.git`
* frioux `git://github.com/frioux/ikiwiki`
* llipavsky `git://github.com/llipavsky/ikiwiki`
The `comment` directive is supplied by the
-[[!iki plugins/comments desc=comments]] plugin, and is used to add a comment
-to a page. Typically, the directive is the only thing on a comment page,
-and is filled out by the comment plugin when a user posts a comment.
+[[!iki plugins/comments desc=comments]] plugin. There should
+be one comment directive in each source file with extension
+`._comment` or `._comment_pending`, and the directive should not
+appear anywhere else. Comments are normally created via the web,
+in which case ikiwiki automatically creates a suitable
+`._comment` file.
+
+Wiki administrators can also commit comment files to the version
+control system directly: they should be named starting with
+the *comments\_pagename* config option (usually `comment_`)
+and ending with `._comment`, for instance `comment_42._comment`.
Example:
## usage
The only required parameter is `content`, the others just add or override
-metadata of the comment.
+metadata for the comment. Many parameters are shortcuts for [[meta]]
+directives.
* `content` - Text to display for the comment.
Note that [[directives|ikiwiki/directive]]
[[blogging|blog]] and [[podcasting|podcast]], as well as a large
array of [[plugins]].
+Alternatively, think of ikiwiki as a particularly flexible static
+site generator with some dynamic features.
+
[[!template id=links]]
## using ikiwiki
+++ /dev/null
-ikiwiki 3.20140125 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * inline: Allow overriding the title of the feed. Closes: #[735123](http://bugs.debian.org/735123)
- Thanks, Christophe Rhodes
- * osm: Escape name parameter. Closes: #[731797](http://bugs.debian.org/731797)"""]]
\ No newline at end of file
--- /dev/null
+ikiwiki 3.20140916 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Don't double-decode CGI submissions with Encode.pm >= 2.53,
+ fixing "Error: Cannot decode string with wide characters".
+ Thanks, [[Antoine Beaupré|anarcat]]
+ * Avoid making trails depend on everything in the wiki by giving them
+ a better way to sort the pages
+ * Don't let users post comments that won't be displayed
+ * Fix encoding of Unicode strings in Python plugins.
+ Thanks, [[chrysn]]
+ * Improve performance and correctness of the \[[!if]] directive
+ * Let \[[!inline rootpage=foo postform=no]] disable the posting form
+ * Switch default \[[!man]] shortcut to manpages.debian.org. Closes: #[700322](http://bugs.debian.org/700322)
+ * Add UUID and TIME variables to edittemplate. Closes: #[752827](http://bugs.debian.org/752827)
+ Thanks, Jonathon Anderson
+ * Display pages in linkmaps as their pagetitle (no underscore escapes).
+ Thanks, [[chrysn]]
+ * Fix aspect ratio when scaling small images, and add support for
+ converting SVG and PDF graphics to PNG.
+ Thanks, [[chrysn]]
+ - suggest ghostscript (required for PDF-to-PNG thumbnailing)
+ and libmagickcore-extra (required for SVG-to-PNG thumbnailing)
+ - build-depend on ghostscript so the test for scalable images can be run
+ * In the CGI wrapper, incorporate $config{ENV} into the environment
+ before executing Perl code, so that PERL5LIB can point to a
+ non-system-wide installation of IkiWiki.
+ Thanks, Lafayette Chamber Singers Webmaster
+ * filecheck: accept MIME types not containing ';'
+ * autoindex: index files in underlays if the resulting pages aren't
+ going to be committed. Closes: #[611068](http://bugs.debian.org/611068)
+ * Add \[[!templatebody]] directive so template pages don't have to be
+ simultaneously a valid template and valid HTML
+ * Add [[smcv]] to Uploaders and release to Debian"""]]
--- /dev/null
+[[!meta author="spalax"]]
+[[!template id=plugin name=compile author="[[Louis|spalax]]"]]
+
+# Compile
+
+The compile plugin provides the `compile` directive, used to on-the-fly compile
+and publish documents.
+
+For instance, if you want to publish files together with their sources (like
+`.tex` and `.pdf` files), you can have the `.tex` file in your source wiki
+directory, and command `\[[!compile files="foo.tex"]]` (or wikilink
+`\[[foo.tex]]`, if the right option is set) will compile file and render as a
+link to the `.pdf` file.
+
+[[!toc startlevel=2]]
+
+## Warning
+
+Some important security notice.
+
+- This plugin allows users to execute arbitrary commands when compiling the
+ wiki. Use at your own risk. If you use Ikiwiki as a static web site compiler
+ (and not a wiki), and you are the only one to compile the wiki, there is no
+ risk.
+
+- Source files are published, whether option `source` is true or not. If
+ `source` is false, source may not be *advertised*, but it is still available
+ somewhere on your website (most likely by replacing in the compiled file URL
+ the extension of the compiled file by the extension of the source file). So,
+ do not use this plugin if you do not want to publish your source files
+ (sorry: I designed this plugin to publish free stuff).
+
+## Rationale
+
+I want to publish some latex files, both source (`.tex`) and compiled (`.pdf`)
+version, but I do not want to maintain two versions of the same file.
+
+Using this plugin, I only have to maintain the `.tex` files, and those files
+are compiled on the fly, so that the `pdf` is published.
+
+## String formatting
+
+Strings (destination name, template name and build command) accept python-like
+syntax ``%{name}s``, which is replaced by the value of variable ``name``. The
+following variables are available.
+
+- `srcname`: Source name.
+- `srcextension`: Extension of the source name.
+- `filetype`: File type (extension of the source name, otherwise specified by directive).
+- `dirname`: Directory of the source file.
+- `wikiname`: Name of source file, relative to source wiki directory.
+- `srcfullname`: Name of source file, relative to file system root.
+- `basename`: Source name, without directory nor extension.
+- `destname`: Destination name (without directory).
+- `destextension`: Extension of the destination name.
+- `targetname`: Destination name, relative to the destination directory.
+- `destfullname`: Destination name, relative to file system root.
+
+## Directive
+
+### Usage
+
+Basic usage of this plugin is:
+
+ \[[!compile files="foo.ext"]]
+
+It renders file `foo.ext` according to rules defined in the setup file, and
+publishes the compiled version.
+
+### Arguments
+
+All the arguments (but `source` and `filetype`) are string which are processed
+using python-like string formatting, and described in the setup options section.
+
+- `files`: List of files used in compilation, as space separated string. For
+ instance, to compile some tex file including a png image, you will have:
+ `files="foo.tex image.png"`. It is not possible to have filenames containing
+ spaces (unless you provide me a patch to recognize escaped spaces).
+- `filetype`: By default, the source file extension is used to determine build
+ command and other configuration. If the same extension refer to different
+ type of files, you can enforce the filetype using this argument. For
+  instance, if some of your LaTeX files have to be compiled with `pdflatex`, while
+  the others require `latex`, your `compile_filetypes` can contain two keys
+ `tex` and `texdvi`. By default, LaTeX files will be compiled using
+ configuration associated to `tex`, unless directive has argument
+ `filetype=texdvi`, in which case the latter configuration is used.
+- `destname`: Name of the compiled file.
+- `build`: Build command.
+- `source`: Boolean to choose whether to publish source file or not. The only
+ effect is the template choice: source is always published (but not always
+ advertised).
+- `template`: Name of the template to use (if set, the `source` option is
+ irrelevant).
+
+### Extensions
+
+Note: This directive does not work if source file name does not have an
+extension (i.e. does not contain a dot). This should not be too hard to
+implement, but I do not need it. Patches welcome.
+
+## Configuration
+
+Here are the setup options (most of them can be overloaded on a per-extension
+basis by setup option `compile_filetypes`, or by directive arguments):
+
+- `compile_source` (boolean): should sources be published with compiled file
+  (this only affects template choice; see warning)? Default is true.
+- `compile_template_source` (string): name of the template to use for compiled
+ files when option `source` is true. Default is `compile_source.tmpl`.
+- `compile_template_nosource` (string): name of the template to use for
+ compiled files when option `source` is false. Default is
+ `compile_nosource.tmpl`.
+- `compile_filetypes` (string): Per extension configuration (see paragraph
+ below).
+- `compile_tmpdir` (string): Path of a directory to use to compile files:
+ source file (and dependency) are copied to this directory before being
+ compiled (to avoid messing the ikiwiki directory with compiled version or
+  auxiliary files). Default is `SOURCE_WIKI/.ikiwiki/tmp/compile`.
+- `compile_bindir` (string): Directory containing binaries to use to compile
+ files. Default is undefined.
+- `compile_depends` (string): List of files all compiled files will depend on
+ (see *Compilation* section below).
+- `compile_build` (string): Command to use to compile files. Default
+ is undefined.
+- `compile_inline` (boolean): If true, wikilinks pointing to files with an
+ extension specified in `compile_filetypes` are treated as a directive
+ \[[!compile files="LINK"]]. For instance, if this is set globally (or just
+ for tex), a wikilink \[[foo.tex]] will compile file `foo.tex`, and publish
+ the compiled `foo.pdf` file.
+
+### The `compile_filetypes` option
+
+This variable is a json string, representing a dictionary. Keys are source file
+extensions, values are dictionary of options applying only to files with this
+extension.
+
+Keys of these new dictionaries are `source`, `template_nosource`,
+`template_source`, `build`, `depends`, `inline`, and they override the generic
+options defined above. They are themselves overridden by directive arguments
+(except `inline`).
+
+Example:
+
+ compile_filetypes => '{
+ "tex": {
+ "build": "pdflatex %{basename}s",
+ "destname": "%{basename}s.pdf",
+ "depends": ["logo.png"],
+ "inline": "1"
+ },
+ "texdvi": {
+ "build": "latex %{basename}s",
+ "destname": "%{basename}s.pdf",
+ "depends": ["logo.eps"]
+ }
+ }'
+
+## Compilation
+
+### Dependencies
+
+Before compilation, the source file and all dependencies are copied to the
+temporary directory defined by option `compile_tmpdir`. For instance, if all
+your LaTeX files are compiled using a custom class `foo.sty`, and a particular
+file `bar.tex` uses the `logo.png` file, your setup option will contain
+`foo.sty` as `depends`, and `compile` directive will be called using
+`\[[!compile files="bar.tex logo.png"]]`. Then, before compilation, files
+`foo.sty`, `bar.tex` and `logo.png` will be copied in the same temporary
+directory.
+
+Note that paths are *flattened* when copied: before performing compilation of
+directive `\[[!compile files="sub1/foo sub2/bar"]]`, files `foo` and `bar` will
+be copied into the same directory: this temporary directory will contain files
+`foo` and `bar`, but not `sub1/foo` and `sub2/bar`.
+
+### Build command
+
+The build command used is (if defined, by priority order):
+
+- defined by argument `build` of directive;
+- setup command ``compile_filetypes{TYPE}{build}``;
+- setup command ``compile_build`` (if you have a generic build command);
+- command ``$config{compile_bindir}/${extension}s %{srcname}s`` (if setup variable ``compile_bindir`` is defined, is a directory, and contains an executable file matching the extension, it will be used);
+- command ``make -f $config{compile_bindir}/make.${extension}s %{destname}s`` (if setup variable ``compile_bindir`` is defined, is a directory, and contains a readable makefile ``make.EXTENSION``, it will be used).
+
+## Template
+
+The way links are rendered is defined in a template, which is (by order of
+priority, some of them depends on whether ``source`` is true):
+
+- argument `template` of directive;
+- setup variable ``compile_filetypes{TYPE}{template_source}`` or ``compile_filetypes{TYPE}{template_nosource}``;
+- setup variable ``compile_source`` or ``compile_nosource``;
+- `compile_source.mdwn` or `compile_nosource.mdwn`.
+
+It is passed the following variables:
+
+- `DESTURL`: URL to the compiled file.
+- `DESTNAME`: Name of the compiled file.
+- `SRCURL`: URL to the source file.
+- `SRCNAME`: Name of the source file (without directory).
+- `ORIGNAME`: Name of the source file (with directory).
+
+Note that templates can be used to display images (instead of a link to them).
+For instance, if you have a `.tiff` file you want to convert to png before
+displaying it on your website, you can use as a template:
+
+ <img src="<TMPL_VAR DESTURL>">
--- /dev/null
+I've submitted a couple of patches in [this pull request](https://github.com/jmtd/ikiwiki/pull/1).
+The first passes along whatever parameters are being supplied to the pagespec evaluation
+(without which, specs like `user(alice)` don't work).
+
+The second changes the "example" returned by `getsetup` to be an actual map, since I saw
+that `--dumpsetup` can make use of that to produce a syntactically correct map example
+in the YAML config file. An earlier commit comment suggested that once was a problem,
+but it doesn't seem to be one now.
+
+Only later did I notice this [[earlier discussion|todo/pagespec_aliases]] suggesting
+that the problem with a map might have been in websetup - which I'm not using, so I don't
+know if it would still be a problem there.
+
+[[jcflack]]
--- /dev/null
+Does this support ikiwiki-specific idioms like `\[[links]]` or `\[[!macros]]`? I looked at [the readme](https://github.com/sciunto/ikiwiki-pandoc) and it's not quite clear there... --[[anarcat]]
+
+> I don't think it needs to? [[wikilinks|ikiwiki/wikilink]] and
+> [[directives|ikiwiki/directive]] (what you called "macros") are handled
+> by the linkify and preprocess hooks, whereas [[plugins/mdwn]] and
+> this plugin are done afterwards, in the htmlize hook. --[[smcv]]
+
+> > I guess that answers my question by a delicious "yes", thanks! --[[anarcat]]
+I have just opened [rubykat/ikiplugins issue #4](https://github.com/rubykat/ikiplugins/issues/4)
+regarding the fact that ymlfront doesn't seem to delete any old pagestate when fields have been
+removed in an edit. The fields are stuck there with their old values until a full rebuild. Seems
+to me ymlfront should just clear out all of the `{ymlfront}` pagestate before parsing the new
+stuff - including in the case where the new page has no ymlfront section at all.
+
+I discovered another slightly-different-but-related issue where simply _changing_ a field value
+in the YAML section doesn't always cause the generated HTML to be updated. Oddly, ikiwiki will
+_say_ it's building the page, but when you look at the HTML output, it's the old content.
+
+Could this involve some clever optimization where ikiwiki looks at the content (that's left over
+after ymlfront stripped out the YAML) and sees it hasn't changed? Does ymlfront need to do
+something more to indicate there is a change? Does the _template_ need to somehow be declared
+to depend on more stuff?
+
+As I said, the log does have a line for 'building' the page, so whatever optimization is happening
+must come later than the determination of what pages to 'build'.
+
+I'm mentioning it here because I'm not sure whether this or the issue on github will be seen
+first - there's a pretty old one open there. This seems to be quite
+potentially useful stuff that never quite got finished - is [[KathrynAndersen]] still
+interested? -- [[jcflack]]
+
+----
+Previous discussion re: delimiters
+
Now that I have implemented a \[[!ymlfront ...]] directive, I would like to remove support for the old "---" delimited format, because
* it is fragile (easily breakable)
**TL;DR**
-[[!toc levels=3]]
+[[!toc levels=4]]
# An odyssey through lots of things that have to be right before OpenID works
>> can't have anything but relatively luckier and unluckier choices, maybe
>> `libwww/perl` is an especially unlucky one?
+>>> Yippee! _My_ provider found their offending `mod_security` rule and took it out,
+>>> so now [ikiwiki.info](/) accepts my OpenID. I'm still not sure it wouldn't be
+>>> worthwhile to change the useragent default.... -- Chap
+
+#### culprit was an Atomicorp ModSecurity rule
+
+Further followup: my provider is using [ModSecurity](https://www.modsecurity.org/)
+with a ruleset commercially supplied by [Atomicorp](https://www.atomicorp.com/products/modsecurity.html),
+which seems to be where this rule came from. They've turned the rule off for _my account_.
+I followed up on my ticket with them, suggesting they at least think about turning it off
+more systemwide (without waiting for other customers to have bizarre problems that are
+hard to troubleshoot), or opening a conversation with Atomicorp about whether such a rule
+is really a good idea. Of course, while they were very responsive about turning it off
+_for me_, it's much iffier whether they'll take my advice any farther than that.
+
+So, this may crop up for anybody with a provider that uses Atomicorp ModSecurity rules.
+
+The ruleset produces a log message saying "turn this rule off if you use libwww-perl", which
+just goes to show whoever wrote that message wasn't thinking about what breaks what. It would
+have to be "turn this rule off if any of _your_ customers might ever need to use or depend on
+an app or service _hosted anywhere else_ that _could_ have been implemented using libwww-perl,
+over which you and your customer have no knowledge or control."
+
+Sigh. -- Chap
+
+> Thanks for the pointer. It seems the open-source ruleset blacklists libwww-perl by default
+> too... this seems very misguided but whatever. I've changed our default User-Agent to
+> `ikiwiki/3.20141012` (or whatever the version is). If we get further UA-blacklisting
+> problems I'm very tempted to go for `Mozilla/5.0 (but not really)` as the
+> next try. --[[smcv]]
+
## Error: OpenID failure: naive_verify_failed_network: Could not contact ID provider to verify response.
Again, this could have various causes. It was helpful to bump the debug level
> To be clear, these are patches to [[!cpan LWPx::ParanoidAgent]].
> Debian's `liblwpx-paranoidagent-perl (>= 1.10-3)` appears to
> have those two patches. --[[smcv]]
+>
+> Irrelevant to this ikiwiki instance, perhaps relevant to others:
+> I've added these patches to [pkgsrc](http://www.pkgsrc.org)'s
+> [[!pkgsrc www/p5-LWPx-ParanoidAgent]] and they'll be included in the
+> soon-to-be-cut 2014Q3 branch. --[[schmonz]]
## Still naive_verify_failed_network, new improved reason
> Also in Debian's `liblwpx-paranoidagent-perl (>= 1.10-3)`, for the record.
> --[[smcv]]
+>
+> And now in pkgsrc's `www/p5-LWPx-ParanoidAgent`, FWIW. --[[schmonz]]
Only that still doesn't end the story, because that hand didn't know what
[this hand](https://github.com/noxxi/p5-io-socket-ssl/commit/4f83a3cd85458bd2141f0a9f22f787174d51d587#diff-1)
> (which is where ikiwiki.info's supporting packages come from).
> Please report it upstream too, if the Debian maintainer doesn't
> get there first. --[[smcv]]
+>
+> Applied in pkgsrc. I haven't attempted to conduct before-and-after
+> test odysseys, but here's hoping your travails save others some
+> time and effort. --[[schmonz]]
+
+> Reported upstream as [LWPx-ParanoidAgent#14](https://github.com/csirtgadgets/LWPx-ParanoidAgent/issues/14)
+> _and_ [IO-Socket-SSL#16](https://github.com/noxxi/p5-io-socket-ssl/issues/16). -- Chap
# Success!!
[[the techno-viking|http://techno-viking.com/posts/ikiwiki-maps/]] and fixed up
by [[anarcat]].
-See [[the Mtl-mesh
-wiki|http://mesh.openisp.ca/nodes/anarcat]] for a sample of what this
+See [[the Reseaulibre.ca wiki|http://reseaulibre.ca/]] for a sample of what this
plugin can do
See also [[plugins/contrib/googlemaps]].
what about handling a `shortcuts-local.mdwn` or `shortcuts/local.mdwn` (if such
a file exists in the wiki), and additionally process that one. Possibily a
conditional `\[[!inline]]` could be used. --[[tschwinge]]
+
+----
+
+The page says
+
+> Additionally, %W is replaced with the text encoded just right for Wikipedia
+
+with the implication that this is odd. However, it appears the escapes
+actually mean:
+
+=%s=
+ If every character in the string is in the Latin-1 range, encode each
+ character as a http %xx escape: ö -> %F6. If not,
+ mangle the string: ☃ (U+2603 SNOWMAN) -> %2603 which
+ actually means "&03".
+=%S=
+ Leave the string as-is.
+=%W=
+ Encode the string as UTF-8, then encode each byte of the UTF-8
+ individually as a http %xx escape: ö -> %C3%B6, ☃ (U+2603 SNOWMAN) ->
+ %E2%98%83.
+
+http %xx encoding is defined in terms of input bytes, not input characters,
+so you can't encode arbitrary Unicode into URLs without knowing which
+encoding the destination server is going to use. UTF-8 is what's
+recommended by the [[!wikipedia Internationalized resource identifier]]
+specification, so I suspect %W is right more often than it's wrong...
+
+I wonder whether %s should mean what %W does now, with a new format
+character - maybe %L for Latin-1? - for the version that only works
+for strings that can be encoded losslessly in Latin-1? --[[smcv]]
of a page. So the key is the page name, and the value is the source
filename. Do not modify this hash.
+Attachments also appear in this hash, with the same key and value.
+
$pagesources{"foo"} = "foo.mdwn";
+ $pagesources{"logo/ikiwiki.png"} = "logo/ikiwiki.png";
+
### `%destsources`
The `%destsources` hash records the name of the source file used to
create each destination file. The key is the output filename (ie,
-"foo/index.html"), and the value is the source filename that it was built
-from (eg, "foo.mdwn"). Note that a single source file may create multiple
+"foo/index.html"), and the value is the name of the page that it was built
+from (eg, "foo"). Note that a single source file may create multiple
destination files. Do not modify this hash directly; call `will_render()`.
-
- $destsources{"foo/index.html"} = "foo.mdwn";
+
+Attachments also appear in this hash, with the same key and value.
+
+ $destsources{"foo/index.html"} = "foo";
+ $destsources{"logo/ikiwiki.png"} = "logo/ikiwiki.png";
## Library functions
-
This is the [[SandBox]], a page anyone can edit to try out ikiwiki
(version [[!version ]]).
`pre?`
+Testing. Test. 試験として書き込みします。
* [[!shortcut name=mozillazinekb url="http://kb.mozillazine.org/%s"]]
* [[!shortcut name=freebsdwiki url="http://wiki.freebsd.org/%s"]]
* [[!shortcut name=hackage url="http://hackage.haskell.org/package/%s"]]
+* [[!shortcut name=pkgsrc url="http://pkgsrc.se/%S"]]
+* [[!shortcut name=doi url="http://dx.doi.org/%s" desc="doi:%s"]]
+* [[!shortcut name=arxiv url="http://arxiv.org/abs/%s" desc="arXiv:%s"]]
To add a new shortcut, use the `shortcut`
[[ikiwiki/directive]]. In the url, "%s" is replaced with the
<li>[[SiteMap]]</li>
<li>[[Contact]]</li>
<li>[[TipJar]]</li>
+<li><a href="http://flattr.com/thing/39811/ikiwiki">Flattr ikiwiki</a></li>
</ul>
-<a href="http://flattr.com/thing/39811/ikiwiki">
-<img src="https://api.flattr.com/button/flattr-badge-large.png"
-alt="Flattr this" title="Flattr this" /></a>
</div>
choose. If you'd like to fund development of a specific feature, see the
[[consultants]] page.
-<a href="https://www.paypal.com/cgi-bin/webscr?cmd=_xclick&business=joey%40kitenet%2enet&item_name=ikiwiki&no_shipping=1&cn=Comments%3f&tax=0¤cy_code=USD&lc=US&bn=PP%2dDonationsBF&charset=UTF%2d8"><img src="https://www.paypal.com/en_US/i/btn/x-click-but04.gif" alt="donate with PayPal" /></a>
-
-<script type="text/javascript">var flattr_url = 'http://ikiwiki.info';</script>
-<script src="http://api.flattr.com/button/load.js" type="text/javascript"></script>
+* [Donate with PayPal](https://www.paypal.com/cgi-bin/webscr?cmd=_xclick&business=joey%40kitenet%2enet&item_name=ikiwiki&no_shipping=1&cn=Comments%3f&tax=0¤cy_code=USD&lc=US&bn=PP%2dDonationsBF&charset=UTF%2d8)
+* [Donate with Flattr](http://flattr.com/thing/39811/ikiwiki)
Thanks to the following people for their kind contributions:
* Amitai Schlair
* Luca Capello
-(Note that this page is locked to prevent anyone from tampering with the PayPal button.
+(Note that this page is locked to prevent anyone from tampering with the PayPal link.
If you prefer your donation *not* be listed here, let [[Joey]] know.)
7. [install binary packages (OSX)](http://www.pkgsrc.org/#index1h1)
-{OK} As of 2014/08/24, the [version of ikiwiki in pkgsrc](http://pkgsrc.se/www/ikiwiki) is 3.20140815.
+{OK} As of 2014/10/14, the [version of ikiwiki in pkgsrc](http://pkgsrc.se/www/ikiwiki) is 3.20140916.
-----
Enrique Castilla
-[!] As of 2014/08/24, the [version of ikiwiki in MacPorts](http://www.macports.org/ports.php?by=name&substr=Ikiwiki) is 3.20110608.
+[!] As of 2014/10/14, the [version of ikiwiki in MacPorts](http://www.macports.org/ports.php?by=name&substr=Ikiwiki) is 3.20110608.
-----
--- /dev/null
+Is there any current thought of reworking ikiwiki CGI using
+any of the suggested [[!cpan CGI::Alternatives]] in response to the
+official deprecation of [[!cpan CGI desc=CGI.pm]]?
+
+Or is it just a matter of 'hold course until [[rewrite ikiwiki in haskell]]'?
+
+> I don't know whether Joey has any plans in this direction, but
+> [[!cpan CGI]], [[!cpan CGI::Session]] and [[!cpan CGI::FormBuilder]]
+> are all documented to be part of the [[plugin API|plugins/write]]
+> so in the short term, I don't think anything is going to change.
+> If someone works out a way to do equivalent hooks nicely in a
+> different framework, that would have to be IkiWiki 4.
+>
+> I'm not sure how much IkiWiki would stand to gain from moving to a
+> more extensive web framework: it doesn't do elaborate URL routing
+> like most webapps, because it produces static HTML for as much of
+> its content as possible anyway. --[[smcv]]
+
+>> One reason for such a change (although a rewrite in haskell is a little drastic, and overlaps with "gitit") would be to allow ikiwiki to run as a shared thread under FastCGI or mod_perl, instead of forking all the time for every new user. The discussion for this is in [[todo/fastcgi_or_modperl_installation_instructions]] and [[todo/multi-thread_ikiwiki]].
+>>
+>> Also right now, there are serious lock contention issues in ikiwiki: any `?do=` action in the CGI is under a global lock right now (`lockwiki()`), for example, which makes scaling ikiwiki to multiple editing users a significant problem. I have seen such contention as a user on this wiki but mostly on the git-annex wiki.
+>>
+>> I, for one, would be happy to see some improvements in this area... --[[anarcat]]
+
+>>> That would be a rewrite, in whatever language: IkiWiki assumes that
+>>> global state is OK, and I don't think keeping existing APIs or
+>>> plugins working unmodified when that changes would be feasible.
+>>>
+>>> It isn't on *my* to-do list, put it that way. --[[smcv]]
+
+>> I'm on a thin pipe, but IIRC CGI.pm is simply no longer going to be bundled with Perl core, and is not deprecated in any other way. Just old, and now an explicit dependency. I may be wrong. --[[schmonz]]
+
+>>> Yeah, that's what perldelta says. Also, in Debian, the future is already
+>>> here: perl-modules Recommends libcgi-pm-perl, and libcgi-pm-perl comes
+>>> before the bundled copy in the search path, so I'm already testing against
+>>> an unbundled CGI.pm, and I suspect Joey is too. --[[smcv]]
> CGI-generated pages should generate those links. This was the implementation of
> [[todo/want_to_avoid_ikiwiki_using_http_or_https_in_urls_to_allow_serving_both]].
>
+>> This wasn't actually the case if the schemes are different; but now
+>> IkiWiki will generate protocol-relative URLs if the CGI is https,
+>> the url is http and the hostname is the same (i.e. it assumes that the https
+>> equivalent of the url will also work). This is to prevent mixed-content
+>> issues, and partially addresses this todo item.
+>> --[[smcv]]
+>
> If your`$config{url}` and `$config{cgiurl}` have different hostnames (e.g.
> `url => "http://wiki.example.com", cgiurl => "http://cgi.example.com/ikiwiki.cgi"`)
> then you might still have this problem. In principle, IkiWiki could generate
> protocol-relative URLs in this situation, but it isn't clear to me how
> widely-supported those are.
>
+>> HTML5 says protocol-relative URLs work, and they seem to be widely
+>> supported in practice, so I've changed the rule to: if the url and cgiurl
+>> share a scheme (protocol) but differ only by hostname, use `//foo/bar`
+>> protocol-relative URLs. This partially addresses this todo.
+>> I'm still thinking about what the right thing is for more complicated
+>> situations: see [[todo/design for cross-linking between content and CGI]].
+>> --[[smcv]]
+>
> If you set both the `$config{url}` and `$config{cgiurl}` to https, but make
> the resulting HTML available over HTTP as well as HTTPS, that should work
> fine - accesses will be over http until the user either explicitly
--- /dev/null
+Here is a patch for page.tmpl to add these landmarks.
+
+--[Patrick](https://www.google.com/accounts/o8/id?id=AItOawlnBLXDQbzD3OCcqZshcmExPNwlgD0tJ7A)
+
+> This can't be applied as a patch as-is because it's based on Tails'
+> modified `page.tmpl`, but I get the general idea. A reviewer will need
+> to check the ARIA meanings of those roles to confirm that they
+> are appropriate (I haven't done that yet). [[!tag patch]] --[[smcv]]
+
+[[!format diff """
+diff --git a/templates/page.tmpl b/templates/page.tmpl
+index 5efad1a..cb76590 100644
+--- a/templates/page.tmpl
++++ b/templates/page.tmpl
+@@ -30,7 +30,7 @@
+ </head>
+ <body>
+
+-<div class="banner">
++<div class="banner" role="banner">
+ <a class="tails" href="<TMPL_VAR HOMEPAGEURL>">
+ <span class="acronym">Tails</span><br/>
+ <span class="slogan">The Amnesic Incognito Live System</span>
+@@ -155,20 +155,20 @@
+ <TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
+
+ <TMPL_IF SIDEBAR>
+-<TMPL_IF HTML5><aside class="sidebar"><TMPL_ELSE><div class="sidebar"></TMPL_IF>
++<TMPL_IF HTML5><aside class="sidebar" role="navigation"><TMPL_ELSE><div class="sidebar" role="navigation"></TMPL_IF>
+ <TMPL_VAR SIDEBAR>
+ <TMPL_IF HTML5></aside><TMPL_ELSE></div></TMPL_IF>
+ </TMPL_IF>
+
+ <div id="pagebody">
+
+-<TMPL_IF HTML5><section id="content"><TMPL_ELSE><div id="content"></TMPL_IF>
++<TMPL_IF HTML5><section id="content" role="main"><TMPL_ELSE><div id="content" role="main"></TMPL_IF>
+ <TMPL_VAR CONTENT>
+ <TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
+
+ <TMPL_UNLESS DYNAMIC>
+ <TMPL_IF COMMENTS>
+-<TMPL_IF HTML5><section id="comments"><TMPL_ELSE><div id="comments"></TMPL_IF>
++<TMPL_IF HTML5><section id="comments" role="complementary"><TMPL_ELSE><div id="comments" role="complementary"></TMPL_IF>
+ <TMPL_VAR COMMENTS>
+ <TMPL_IF ADDCOMMENTURL>
+ <div class="addcomment">
+@@ -183,7 +183,7 @@
+
+ </div>
+
+-<TMPL_IF HTML5><footer id="footer" class="pagefooter"><TMPL_ELSE><div id="footer" class="pagefooter"></TMPL_IF>
++<TMPL_IF HTML5><footer id="footer" class="pagefooter" role="contentinfo"><TMPL_ELSE><div id="footer" class="pagefooter" role="contentinfo"></TMPL_IF>
+ <TMPL_UNLESS DYNAMIC>
+ <TMPL_IF HTML5><nav id="pageinfo"><TMPL_ELSE><div id="pageinfo"></TMPL_IF>
+
+"""]]
+
+----
+
+Here is a review. Please "sign" any responses so we can keep track of
+who is talking to who :-)
+
+General points:
+
+The `role` attribute is not allowed by the XHTML 1.0 Strict DTD, so we
+should only emit it in HTML5 mode (and we should probably
+[[todo/generate_HTML5_by_default]]).
+
+Specific roles:
+
+[[!format diff """
+-<div class="banner">
++<div class="banner" role="banner">
+"""]]
+
+There is no such class in IkiWiki's page.tmpl, so this part can't be applied.
+After this is applied to the main IkiWiki, you'll need to talk to the
+maintainers of the Tails wiki about changing that in their fork of the template.
+
+[[!format diff """
+-<TMPL_IF HTML5><aside class="sidebar"><TMPL_ELSE><div class="sidebar"></TMPL_IF>
++<TMPL_IF HTML5><aside class="sidebar" role="navigation"><TMPL_ELSE><div class="sidebar" role="navigation"></TMPL_IF>
+"""]]
+
+I don't think the sidebar is *necessarily* navigation, although it's a
+reasonable guess. I would hope that the fact that it's an `<aside>`
+in HTML5 mode is enough to give accessibility tools a clue already?
+Would declaring this to be a `note` be sufficient?
+
+I've applied your suggested roles for #main, #comments and #footer,
+but only in HTML5 mode for the reason given above. I have not applied
+a role to the sidebar just yet.
+
+--[[smcv]]
--- /dev/null
+We're accumulating a significant number of bugs related to cross-linking
+between the content and the CGI not being as relative as we would like.
+This is an attempt to design a solution for them all in a unified way,
+rather than solving one bug at the cost of exacerbating another.
+--[[smcv]]
+
+# Terminology
+
+* Absolute: starts with a scheme, like
+ `http://example.com/ikiwiki.cgi`, `https://www.example.org/`
+
+* Protocol-relative: starts with `//` like `//example.com/ikiwiki.cgi`
+
+* Host-relative: starts with `/` like `/ikiwiki.cgi`
+
+* Relative: starts with neither `/` nor a scheme, like `../ikiwiki.cgi`
+
+# What we need
+
+* Static content must be able to link to other static content
+
+* Static content must be able to link to the CGI
+
+* CGI-generated content must be able to link to arbitrary
+ static content (it is sufficient for it to be able to link
+ to the "root" of the `destdir`)
+
+* CGI-generated content must be able to link to the CGI
+
+# Constraints
+
+* URIs in RSS feeds must be absolute, because feed readers do not have
+ any consistent semantics for the base of relative links
+
+* If we have a `<base href>` then HTML 4.01 says it must be
+ absolute, although HTML 5 does relax this by defining semantics
+ for a relative `<base href>` - it is interpreted relative to the
+ "fallback base URL" which is the URL of the page being viewed
+ ([[bugs/trouble_with_base_in_search]],
+ [[bugs/preview_base_url_should_be_absolute]])
+
+* It is currently possible for the static content and the CGI
+ to be on different domains, e.g. `www.example.com`
+ vs. `cgi.example.com`; this should be preserved
+
+* It is currently possible to serve static content "mostly over
+ HTTP" (i.e. advertise a http URI to readers, and use a http
+ URI in RSS feeds etc.) but use HTTPS for the CGI
+
+* If the static content is served over HTTPS, it must refer
+ to other static content and the CGI via HTTPS (to avoid
+ mixed content, which is a vulnerability); this may be
+ either absolute, protocol-relative, host-relative or relative
+
+* If the CGI is served over HTTPS, it must refer to static
+  content and the CGI via HTTPS; again, this may be
+  either absolute, protocol-relative, host-relative or relative
+ ([[todo/Protocol_relative_urls_for_stylesheet_linking]])
+
+* Because reverse proxies and `w3mmode` exist, it must be
+ possible to configure ikiwiki to not believe the `HTTPS`, etc.,
+ CGI variables, and force a particular scheme or host
+ ([[bugs/W3MMode_still_uses_http:__47____47__localhost__63__]],
+ [[forum/Using_reverse_proxy__59___base_URL_is_http_instead_of_https]],
+ [[forum/Dot_CGI_pointing_to_localhost._What_happened__63__]])
+
+* For relative links in page-previews to work correctly without
+ having to have global state or thread state through every use of
+ `htmllink` etc., `cgitemplate` needs to make links in the page body
+ work as if we were on the page being previewed.
+
+# "Would be nice"
+
+* In general, the more relative the better
+
+* [[schmonz]] wants to direct all CGI pageviews to https
+ even if the visitor comes from http (but this can be done
+ at the webserver level by making http://example.com/ikiwiki.cgi
+ a redirect to https://example.com/ikiwiki.cgi, so is not
+ necessarily mandatory)
+
+* [[smcv]] has some sites that have non-CA-cartel-approved
+ certificates, with a limited number of editors who can be taught
+ to add SSL policy exceptions and log in via https;
+ anonymous/read-only actions like `do=goto` should
+ not go via HTTPS, since random readers would get scary SSL
+ warnings
+ ([[todo/want_to_avoid_ikiwiki_using_http_or_https_in_urls_to_allow_serving_both]],
+ [[forum/CGI_script_and_HTTPS]])
+
+* It would be nice if the CGI did not need to use a `<base>` so that
+ we could use host-relative URI references (`/sandbox/`) or scheme-relative
+ URI references (`//static.example.com/sandbox/`)
+ (see [[bugs/trouble_with_base_in_search]])
+
+As a consequence of the "no mixed content" constraint, I think we can
+make some assumptions:
+
+* if the `cgiurl` is http but the CGI discovers at runtime that it has
+ been reached via https, we can assume that the https equivalent,
+ or a host- or protocol-relative URI reference to itself, would work;
+
+* if the `url` is http but the CGI discovers at runtime that it has been
+ reached via https, we can assume that the https equivalent of the `url`
+ would work
+
+In other words, best-practice would be to list your `url` and `cgiurl`
+in the setup file as http if you intend that they will most commonly
+be accessed via http (e.g. the "my cert is not CA-cartel approved"
+use-case), or as https if you intend to force accesses into
+being via https (the "my wiki is secret" use-case).
+
+# Regression test
+
+I've added a regression test in `t/relativity.t`. We might want to
+consider dropping some of it or skipping it unless a special environment
+variable is set once this is all working, since it's a bit slow.
+--[[smcv]]
+
+# Remaining bugs
+
+## Arguable
+
+* Configure the url and cgiurl to both be https, then access the
+ CGI via a non-https address. The stylesheet is loaded from the http
+ version of the static site, but maybe it should be forced to https?
+
+* Configure url = "http://static.example.com/",
+ cgiurl = "http://cgi.example.com/ikiwiki.cgi" and access the
+ CGI via staging.example.net. Self-referential links to the
+ CGI point to cgi.example.com, but maybe they should point to
+ staging.example.net?
+
+* *(possibly incomplete, look for TODO and ??? in relativity.t)*
>> what make `pagespec_match_list` more efficient than repeated
>> `pagespec_match_list`." to give an idea of why it is there in the first
>> place. --[[chrysn]]
+
+>>> [[done]] in 3.20140916 --s
--- /dev/null
+The `html5` option was added in 2010 and marked as "not experimental" in 2011
+but is not the default.
+
+According to <http://caniuse.com/#feat=html5semantic>, all recent versions
+of all major browsers - even IE (9+) - support the HTML5
+semantic elements (`<section>` etc.), except for `<main>` which IkiWiki
+doesn't use anyway. However, IE 8 is not a current version, but has ~ 4%
+market share and doesn't support `<section>` and friends; so there's still
+a compatibility concern there.
+
+In particular, non-HTML5 mode uses `<!DOCTYPE html PUBLIC
+"-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">`
+which doesn't allow newer markup like the `role` attribute, so we can't close
+[[todo/add_aria_landmarks_to_make_ikiwiki_websites_more_accessible]] while
+remaining XHTML 1.0 Strict. The recommended pseudo-doctype for HTML5, and for
+HTML with ARIA markup, is `<!DOCTYPE html>`.
+
+(I do think we should continue to use `<xml-compatible-tags />` and output
+well-formed XML so people who want to do XSLT tricks with IkiWiki's output
+can do so, though.)
+
+In practice, real browsers have never actually implemented a strict XHTML mode:
+they've always parsed `text/html` as "tag soup", because they need a tag-soup
+parser anyway, and nobody wants to maintain two parsers.
+
+Kai also wants a HTML5 doctype for [[bugs/more mobile friendly default themes]].
+
+Options include:
+
+* set html5 to 1 by default but retain the dual-mode templates,
+ breaking IE 8 by default
+
+* remove the option and always behave as if it had been 1, simplifying
+ the templates and breaking IE 8 unconditionally
+
+* either of the above and include
+ [html5shiv](https://code.google.com/p/html5shiv/) to de-break IE 8
+
+* change the doctype to `<!DOCTYPE html>`
+ unconditionally, stop trying to limit ourselves to XHTML 1.0 Strict
+ (use HTML5 features that degrade gracefully, like
+ [[ARIA roles|todo/add aria landmarks to make ikiwiki websites more accessible]]),
+ but avoid using the new elements like `<section>` that require specific
+ browser support unless `html5` is set to 1. That would get rid of the
+ backwards-compatibility concerns while keeping the ability to use
+ post-2000 markup; we can use `html5` to mean "be more enthusiastic about
+ HTML5 features even if they might fail on older browsers".
+
+Using the HTML5 doctype does mean we lose the ability to validate the output
+against a DTD (as `wdg-html-validator` does), but DTDs have very little to
+do with practical browser compatibility in any case.
+
+[[!template id=gitbranch branch=smcv/ready/html5
+author="[[Simon McVittie|smcv]]"
+browse=http://git.pseudorandom.co.uk/smcv/ikiwiki.git/shortlog/refs/heads/ready/html5]]
+[[!tag patch users/smcv/ready]]
+
+At the moment my preferred option is the last, for which see my `ready/html5`
+branch. I'll apply this at some point if there are no objections.
+
+--[[smcv]]
--- /dev/null
+If I'm reading the docs right, I count three different ways
+of associating some local styling information with a portion
+of a site:
+
+* [[plugins/localstyle]] uses the [[ikiwiki/subpage/LinkingRules]] to find the 'nearest' stylesheet
+* [[plugins/edittemplate]] uses a directive with a [[ikiwiki/PageSpec]] to indicate which
+ pages should get which templates
+* [[plugins/pagetemplate]] doesn't do a thing for you unless you shoehorn a
+ `pagetemplate` directive into every affected page.
+
+That last is the one that seems least useful. The [[ikiwiki/PageSpec]] approach seems
+most flexible.
+
+Would it be a bad thing to allow `pagetemplate` to work the way `edittemplate` does?
+Maybe just extend the existing directive? If it has a `pages` parameter, it specifies
+the template for the supplied [[ikiwiki/PageSpec]], otherwise it just affects the enclosing page
+as it does now?
+
+--Chap
When using the GeoJSON output of the OSM plugin (osm_format: GeoJSON), the name and description in the popups are missing, this patch fixes the issue.
+> "Fix the title and description of map popups"
+>
+> + # Rename desc to description (this matches the kml output)
+>
+> Is there a spec for this anywhere, or a parser with which it needs to be
+> compatible?
+> --[[smcv]] [[!tag reviewed]]
+>> This is especially confusing because this is actually about JSON, not KML. Disregarding that, here's the [geojson homepage](http://geojson.org/) which has a link to the spec. The spec doesn't seem to specify `description`, `desc` or `name` anywhere. --[[anarcat]]
+
+>> No, although I believe the only code that parses this is line 112 of
+>> [underlays/osm/ikiwiki/osm.js](http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=underlays/osm/ikiwiki/osm.js;h=37e588f7b5bba4c1125052f82c358359a3459705;hb=HEAD#l112).
+
+>>> Ah, right, then this may make sense after all... --[[anarcat]]
Currently, the documented icon parameter to the waypoint directive is not used. This patch fixes that, and fixes some related problems in the KML output.
> That patch looks pretty awesome, thanks for your work on it. I don't have time to test it now, but if it works, I am all for its inclusion. --[[anarcat]]
+
+> + my $tag = $params{'tag'};
+>
+> Please check indentation: you're mixing spaces and hard tabs, apparently
+> with the assumption that a tab is worth 4 spaces.
+>
+> - my $icon = $config{'osm_default_icon'} || "ikiwiki/images/osm.png"; # sanitized: we trust $config
+> + my $icon = $params{'icon'}; # sanitized: we trust $config
+>
+> So there's a comment there that explains why the value of `$icon` can
+> be trusted, but it is no longer true, because it no longer comes from
+> `$config`. This does not fill me with confidence. Maybe it's OK to use
+> a wiki-editor-supplied icon, maybe not. If it is OK, please justify why,
+> and in any case, please do not leave old comments if they are no longer
+> true.
+>
+> In this case I suspect editors may be able to specify an icon whose URL is
+> `javascript:alert("cross-site scripting!")` (or something more malicious)
+> and have it written into the KML as-is. The osm plugin has had cross-site
+> scripting vulnerabilities before, I don't want to add another.
+>
+> + externalGraphic: "${icon}"
+>
+> I don't think Perl variable interpolation is going to work in Javascript?
+> I suspect this should have been inserting something into the GeoJSON instead?
+>
+> --[[smcv]]
--- /dev/null
+The [stay of execution](http://source.ikiwiki.branchable.com/?p=source.git;a=commit;h=6660fd643bf3b65745d62b24cb16fef1b5205207) of Google's OpenID support, possibly until 2017,
+_only_ applies to ikiwikis that had already been live and whose `openid_realm`s had been
+seen by Google in at least one request by 19 May 2014, or whose admins had emailed Google to
+request an extension by 15 June 2014 ... this according to Miguel Andres's answer on
+[this thread](http://stackoverflow.com/questions/23773275/changed-domain-error-openid-auth-request-contains-an-unregistered-domain).
+
+Google will not work as an OpenID provider for any ikiwiki set up since that time.
+
+So, probably that Google option shouldn't be in the OpenID selector; maybe there
+should be an option: default _off_, can be turned _on_ for an established ikiwiki
+instance that is known to be grandfathered.
+
+-- [[jcflack]]
> Reviewed, tested: looks good to me. We need it for the [Tails forum](https://tails.boum.org/forum/). --[[intrigeri]]
>> Hi, is there a chance of seeing this plugin getting included in a release at any point soon? --sajolida
+
+>>> (Reviewing, better late than never...)
+>>>
+>>> It seems really non-obvious to me that the mtime of a page is
+>>> updated as a side-effect of sorting. I think it might also happen too
+>>> late for it to have the desired effect: mtimes should be updated before
+>>> the build phase starts, but sorting happens during the build phase.
+>>>
+>>> If we had a solution for [[!debbug 479371]] - copying
+>>> the mtime from child pages to a parent page - then it would
+>>> be enough to configure the forum threads to inherit the mtime
+>>> of their comments, and then sorting by mtime would do what
+>>> you wanted. The remaining problem would be to have a page pick up the
+>>> most recent mtime from a somewhat configurable set of pages. If the page
+>>> selection is done by pagespec, then by the time those can be matched
+>>> deterministically, it's also too late to be getting the desired
+>>> effect from changing mtimes... so perhaps this is a non-starter.
+>>>
+>>> Alternatively, perhaps just doing the sorting, and updating some
+>>> displayable last-update counter that is not the mtime, would be OK?
+>>> --[[smcv]]
--- /dev/null
+[[!meta redir=users/schmonz]]
[Amitai Schlair](http://www.schmonz.com/) has contributed code to ikiwiki...
[[!map
-pages="!*/Discussion and ((link(users/schmonz) and plugins/*) or rcs/cvs or todo/fancypodcast)"
+pages="!*/Discussion and ((link(users/schmonz) and plugins/* and !plugins/openid/*) or rcs/cvs or todo/fancypodcast)"
]]
...and uses ikiwiki for all sorts of things:
BEGIN {
@pages=qw(index features news plugins/map security);
- if (! -x "/usr/bin/validate") {
- plan skip_all => "/usr/bin/validate html validator not present";
+ if (system("command -v validate >/dev/null") != 0) {
+ plan skip_all => "html validator not present";
}
else {
plan(tests => int @pages + 2);
--- /dev/null
+#!/usr/bin/perl
+use warnings;
+use strict;
+
+use Test::More;
+plan(skip_all => "IPC::Run not available")
+ unless eval q{
+ use IPC::Run qw(run);
+ 1;
+ };
+
+use IkiWiki;
+
+use Cwd qw(getcwd);
+use Errno qw(ENOENT);
+
+# Black-box (ish) test for relative linking between CGI and static content
+
+# Scrape the interesting URLs out of a CGI-generated HTML page.
+# Takes the page body as a string and returns a hash with up to four keys:
+#   basehref  - href of the <base> element, if one was emitted
+#   stylehref - href used to load style.css
+#   tophref   - href of the parentlinks entry pointing at the wiki root
+#   cgihref   - href of a self-referential CGI link (the do=prefs link)
+# A key is simply absent when its pattern does not match, so callers
+# should not assume all four are present.
+sub parse_cgi_content {
+ my $content = shift;
+ my %bits;
+ if ($content =~ qr{<base href="([^"]+)" */>}) {
+ $bits{basehref} = $1;
+ }
+ if ($content =~ qr{href="([^"]+/style.css)"}) {
+ $bits{stylehref} = $1;
+ }
+ if ($content =~ qr{class="parentlinks">\s+<a href="([^"]+)">this is the name of my wiki</a>/}s) {
+ $bits{tophref} = $1;
+ }
+ if ($content =~ qr{<a[^>]+href="([^"]+)\?do=prefs"}) {
+ $bits{cgihref} = $1;
+ }
+ return %bits;
+}
+
+# Create a source page under t/tmp/in and backdate its atime/mtime to a
+# fixed old timestamp (333333333), so that a later rebuild sees it as
+# pre-existing content rather than a fresh edit.
+sub write_old_file {
+ my $name = shift;
+ my $content = shift;
+
+ writefile($name, "t/tmp/in", $content);
+ ok(utime(333333333, 333333333, "t/tmp/in/$name"));
+}
+
+# Write a YAML setup file to t/tmp/test.setup from the given key/value
+# arguments. cgiurl and html5 are required; url, w3mmode and
+# reverse_proxy are optional and their lines are left out entirely
+# when not supplied (an empty line is emitted in their place).
+# The anonok plugin is enabled so the tests can exercise page previews
+# without authentication.
+sub write_setup_file {
+ my (%args) = @_;
+ my $urlline = defined $args{url} ? "url: $args{url}" : "";
+ my $w3mmodeline = defined $args{w3mmode} ? "w3mmode: $args{w3mmode}" : "";
+ my $reverseproxyline = defined $args{reverse_proxy} ? "reverse_proxy: $args{reverse_proxy}" : "";
+
+ writefile("test.setup", "t/tmp", <<EOF
+# IkiWiki::Setup::Yaml - YAML formatted setup file
+wikiname: this is the name of my wiki
+srcdir: t/tmp/in
+destdir: t/tmp/out
+templatedir: templates
+$urlline
+cgiurl: $args{cgiurl}
+$w3mmodeline
+cgi_wrapper: t/tmp/ikiwiki.cgi
+cgi_wrappermode: 0754
+html5: $args{html5}
+# make it easier to test previewing
+add_plugins:
+- anonok
+anonok_pagespec: "*"
+$reverseproxyline
+ENV: { 'PERL5LIB': 'blib/lib:blib/arch' }
+EOF
+ );
+}
+
+# Remove any stale CGI wrapper (tolerating its absence via ENOENT) and
+# rebuild the whole wiki, regenerating wrappers, from the current setup
+# file. Asserts that both steps succeed.
+sub thoroughly_rebuild {
+ ok(unlink("t/tmp/ikiwiki.cgi") || $!{ENOENT});
+ ok(! system("./ikiwiki.out --setup t/tmp/test.setup --rebuild --wrappers"));
+}
+
+# Assert that the generated CGI wrapper has the permission bits (0754)
+# requested by cgi_wrappermode in the setup file.
+sub check_cgi_mode_bits {
+ my (undef, undef, $mode, undef, undef,
+ undef, undef, undef, undef, undef,
+ undef, undef, undef) = stat("t/tmp/ikiwiki.cgi");
+ is($mode & 07777, 0754);
+}
+
+# Sanity-check the statically generated a/b/c page: it must exist,
+# must not contain a <base> element, must link to the CGI in the way
+# described by the caller-supplied regex, and must use relative
+# cross-links between static pages (up-links and sibling links).
+sub check_generated_content {
+ my $cgiurl_regex = shift;
+ ok(-e "t/tmp/out/a/b/c/index.html");
+ my $content = readfile("t/tmp/out/a/b/c/index.html");
+ # no <base> on static HTML
+ unlike($content, qr{<base\W});
+ like($content, $cgiurl_regex);
+ # cross-links between static pages are relative
+ like($content, qr{<li>A: <a href="../../">a</a></li>});
+ like($content, qr{<li>B: <a href="../">b</a></li>});
+ like($content, qr{<li>E: <a href="../../d/e/">e</a></li>});
+}
+
+# Execute the generated CGI wrapper under a synthesized CGI/1.1
+# environment and return its stdout (headers + body) as a string.
+# Two pseudo-arguments are consumed rather than exported to the
+# environment: is_preview selects a POST that previews an edit of
+# a/b/c (otherwise a GET of do=prefs), and is_https sets HTTPS=on
+# with port 443 (otherwise port 80). Any remaining arguments are
+# extra environment variables that override the defaults, e.g.
+# HTTP_HOST or SCRIPT_NAME.
+sub run_cgi {
+ my (%args) = @_;
+ my ($in, $out);
+ my $is_preview = delete $args{is_preview};
+ my $is_https = delete $args{is_https};
+ my %defaults = (
+ SCRIPT_NAME => '/cgi-bin/ikiwiki.cgi',
+ HTTP_HOST => 'example.com',
+ );
+ if (defined $is_preview) {
+ $defaults{REQUEST_METHOD} = 'POST';
+ $in = 'do=edit&page=a/b/c&Preview';
+ $defaults{CONTENT_LENGTH} = length $in;
+ } else {
+ $defaults{REQUEST_METHOD} = 'GET';
+ $defaults{QUERY_STRING} = 'do=prefs';
+ }
+ if (defined $is_https) {
+ $defaults{SERVER_PORT} = '443';
+ $defaults{HTTPS} = 'on';
+ } else {
+ $defaults{SERVER_PORT} = '80';
+ }
+ my %envvars = (
+ %defaults,
+ %args,
+ );
+ # IPC::Run's init callback runs in the child, so the environment
+ # changes do not leak into the test harness process
+ run(["./t/tmp/ikiwiki.cgi"], \$in, \$out, init => sub {
+ map {
+ $ENV{$_} = $envvars{$_}
+ } keys(%envvars);
+ });
+
+ return $out;
+}
+
+# One-time fixture setup: build ikiwiki.out, wipe and recreate t/tmp,
+# and populate the source dir with a small page hierarchy
+# (a, a/b, a/b/c, a/d, a/d/e) whose cross-links the later tests check.
+sub test_startup {
+ ok(! system("make -s ikiwiki.out"));
+ ok(! system("rm -rf t/tmp"));
+ ok(! system("mkdir t/tmp"));
+
+ write_old_file("a.mdwn", "A");
+ write_old_file("a/b.mdwn", "B");
+ write_old_file("a/b/c.mdwn",
+ "* A: [[a]]\n".
+ "* B: [[b]]\n".
+ "* E: [[a/d/e]]\n");
+ write_old_file("a/d.mdwn", "D");
+ write_old_file("a/d/e.mdwn", "E");
+}
+
+# Site 1: url and cgiurl share a host and both use plain http.
+# Checks the links emitted for plain access, HTTPS access, access via a
+# different hostname, and page previews, first with html5 => 0 (where
+# the <base> must be absolute) and then with html5 => 1 (where a
+# relative <base> is allowed, so everything can be host-relative).
+sub test_site1_perfectly_ordinary_ikiwiki {
+ write_setup_file(
+ html5 => 0,
+ url => "http://example.com/wiki/",
+ cgiurl => "http://example.com/cgi-bin/ikiwiki.cgi",
+ );
+ thoroughly_rebuild();
+ check_cgi_mode_bits();
+ # url and cgiurl are on the same host so the cgiurl is host-relative
+ check_generated_content(qr{<a[^>]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"});
+ my %bits = parse_cgi_content(run_cgi());
+ is($bits{basehref}, "http://example.com/wiki/");
+ like($bits{stylehref}, qr{^(?:(?:http:)?//example.com)?/wiki/style.css$});
+ like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:http:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+ # when accessed via HTTPS, links are secure
+ %bits = parse_cgi_content(run_cgi(is_https => 1));
+ is($bits{basehref}, "https://example.com/wiki/");
+ like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+ like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+ # when accessed via a different hostname, links stay on that host
+ %bits = parse_cgi_content(run_cgi(HTTP_HOST => 'staging.example.net'));
+ is($bits{basehref}, "http://staging.example.net/wiki/");
+ like($bits{stylehref}, qr{^(?:(?:http:)?//staging.example.net)?/wiki/style.css$});
+ like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:http:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$});
+
+ # previewing a page
+ %bits = parse_cgi_content(run_cgi(is_preview => 1));
+ is($bits{basehref}, "http://example.com/wiki/a/b/c/");
+ like($bits{stylehref}, qr{^(?:(?:http:)?//example.com)?/wiki/style.css$});
+ like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:http:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+ # in html5, the <base> is allowed to be relative, and we take full
+ # advantage of that
+ write_setup_file(
+ html5 => 1,
+ url => "http://example.com/wiki/",
+ cgiurl => "http://example.com/cgi-bin/ikiwiki.cgi",
+ );
+ thoroughly_rebuild();
+ check_cgi_mode_bits();
+ # url and cgiurl are on the same host so the cgiurl is host-relative
+ check_generated_content(qr{<a[^>]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"});
+
+ %bits = parse_cgi_content(run_cgi());
+ is($bits{basehref}, "/wiki/");
+ is($bits{stylehref}, "/wiki/style.css");
+ is($bits{tophref}, "/wiki/");
+ is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
+
+ # when accessed via HTTPS, links are secure - this is easy because under
+ # html5 they're independent of the URL at which the CGI was accessed
+ %bits = parse_cgi_content(run_cgi(is_https => 1));
+ is($bits{basehref}, "/wiki/");
+ is($bits{stylehref}, "/wiki/style.css");
+ is($bits{tophref}, "/wiki/");
+ is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
+
+ # when accessed via a different hostname, links stay on that host -
+ # this is really easy in html5 because we can use relative URLs
+ %bits = parse_cgi_content(run_cgi(HTTP_HOST => 'staging.example.net'));
+ is($bits{basehref}, "/wiki/");
+ is($bits{stylehref}, "/wiki/style.css");
+ is($bits{tophref}, "/wiki/");
+ is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
+
+ # previewing a page
+ %bits = parse_cgi_content(run_cgi(is_preview => 1));
+ is($bits{basehref}, "/wiki/a/b/c/");
+ is($bits{stylehref}, "/wiki/style.css");
+ like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
+ is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
+}
+
+# Site 2: static content and the CGI live on different hostnames
+# (static.example.com vs cgi.example.com), both http. Cross-host links
+# therefore cannot be host-relative: they must be absolute (html5 => 0)
+# or protocol-relative (html5 => 1). The TODO blocks record the open
+# question of whether a CGI accessed via a third hostname should emit
+# self-referential links to that hostname instead of the configured one.
+sub test_site2_static_content_and_cgi_on_different_servers {
+ write_setup_file(
+ html5 => 0,
+ url => "http://static.example.com/",
+ cgiurl => "http://cgi.example.com/ikiwiki.cgi",
+ );
+ thoroughly_rebuild();
+ check_cgi_mode_bits();
+ # url and cgiurl are not on the same host so the cgiurl has to be
+ # protocol-relative or absolute
+ check_generated_content(qr{<a[^>]+href="(?:http:)?//cgi.example.com/ikiwiki.cgi\?do=prefs"});
+
+ my %bits = parse_cgi_content(run_cgi(SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
+ like($bits{basehref}, qr{^http://static.example.com/$});
+ like($bits{stylehref}, qr{^(?:(?:http:)?//static.example.com)?/style.css$});
+ like($bits{tophref}, qr{^(?:http:)?//static.example.com/$});
+ like($bits{cgihref}, qr{^(?:(?:http:)?//cgi.example.com)?/ikiwiki.cgi$});
+
+ # when accessed via HTTPS, links are secure
+ %bits = parse_cgi_content(run_cgi(is_https => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
+ like($bits{basehref}, qr{^https://static.example.com/$});
+ like($bits{stylehref}, qr{^(?:(?:https:)?//static.example.com)?/style.css$});
+ like($bits{tophref}, qr{^(?:https:)?//static.example.com/$});
+ like($bits{cgihref}, qr{^(?:(?:https:)?//cgi.example.com)?/ikiwiki.cgi$});
+
+ # when accessed via a different hostname, links to the CGI (only) should
+ # stay on that host?
+ %bits = parse_cgi_content(run_cgi(is_preview => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'staging.example.net'));
+ like($bits{basehref}, qr{^http://static.example.com/a/b/c/$});
+ like($bits{stylehref}, qr{^(?:(?:http:)?//static.example.com|\.\./\.\./\.\.)/style.css$});
+ like($bits{tophref}, qr{^(?:(?:http:)?//static.example.com|\.\./\.\./\.\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:http:)?//(?:staging\.example\.net|cgi\.example\.com))?/ikiwiki.cgi$});
+ TODO: {
+ local $TODO = "use self-referential CGI URL?";
+ like($bits{cgihref}, qr{^(?:(?:http:)?//staging.example.net)?/ikiwiki.cgi$});
+ }
+
+ write_setup_file(
+ html5 => 1,
+ url => "http://static.example.com/",
+ cgiurl => "http://cgi.example.com/ikiwiki.cgi",
+ );
+ thoroughly_rebuild();
+ check_cgi_mode_bits();
+ # url and cgiurl are not on the same host so the cgiurl has to be
+ # protocol-relative or absolute
+ check_generated_content(qr{<a[^>]+href="(?:http:)?//cgi.example.com/ikiwiki.cgi\?do=prefs"});
+
+ %bits = parse_cgi_content(run_cgi(SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
+ is($bits{basehref}, "//static.example.com/");
+ is($bits{stylehref}, "//static.example.com/style.css");
+ is($bits{tophref}, "//static.example.com/");
+ is($bits{cgihref}, "//cgi.example.com/ikiwiki.cgi");
+
+ # when accessed via HTTPS, links are secure - in fact they're exactly the
+ # same as when accessed via HTTP
+ %bits = parse_cgi_content(run_cgi(is_https => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
+ is($bits{basehref}, "//static.example.com/");
+ is($bits{stylehref}, "//static.example.com/style.css");
+ is($bits{tophref}, "//static.example.com/");
+ is($bits{cgihref}, "//cgi.example.com/ikiwiki.cgi");
+
+ # when accessed via a different hostname, links to the CGI (only) should
+ # stay on that host?
+ %bits = parse_cgi_content(run_cgi(is_preview => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'staging.example.net'));
+ is($bits{basehref}, "//static.example.com/a/b/c/");
+ is($bits{stylehref}, "//static.example.com/style.css");
+ is($bits{tophref}, "../../../");
+ like($bits{cgihref}, qr{//(?:staging\.example\.net|cgi\.example\.com)/ikiwiki\.cgi});
+ TODO: {
+ local $TODO = "use self-referential CGI URL maybe?";
+ is($bits{cgihref}, "//staging.example.net/ikiwiki.cgi");
+ }
+}
+
+# Site 3: both url and cgiurl are configured as https on one host,
+# signalling that everything should be served securely. Checks HTTPS
+# access, (misconfigured) plain-HTTP access, access via another
+# hostname, and previews. The TODO block records that an http request
+# arguably ought to be forced back to https links. The html5 => 1
+# variant is not tested because it would be identical to site 1's.
+sub test_site3_we_specifically_want_everything_to_be_secure {
+ write_setup_file(
+ html5 => 0,
+ url => "https://example.com/wiki/",
+ cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
+ );
+ thoroughly_rebuild();
+ check_cgi_mode_bits();
+ # url and cgiurl are on the same host so the cgiurl is host-relative
+ check_generated_content(qr{<a[^>]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"});
+
+ # when accessed via HTTPS, links are secure
+ my %bits = parse_cgi_content(run_cgi(is_https => 1));
+ is($bits{basehref}, "https://example.com/wiki/");
+ like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+ like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+ # when not accessed via HTTPS, links should still be secure
+ # (but if this happens, that's a sign of web server misconfiguration)
+ %bits = parse_cgi_content(run_cgi());
+ like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+ TODO: {
+ local $TODO = "treat https in configured url, cgiurl as required?";
+ is($bits{basehref}, "https://example.com/wiki/");
+ like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+ }
+ like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+ # when accessed via a different hostname, links stay on that host
+ %bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net'));
+ is($bits{basehref}, "https://staging.example.net/wiki/");
+ like($bits{stylehref}, qr{^(?:(?:https:)?//staging.example.net)?/wiki/style.css$});
+ like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:https:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$});
+
+ # previewing a page
+ %bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1));
+ is($bits{basehref}, "https://example.com/wiki/a/b/c/");
+ like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+ like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
+ like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+ # not testing html5: 0 here because that ends up identical to site 1
+}
+
+sub test_site4_cgi_is_secure_static_content_doesnt_have_to_be {
+	# (NetBSD wiki)
+	# Scenario: static content served over plain http, CGI over https,
+	# both on the same host. Because the schemes differ, CGI output must
+	# use absolute or protocol-relative URLs so its pages never embed
+	# mixed (http-on-https) content.
+	# NOTE: dots in the staging hostname are escaped in every regex below
+	# so the host match is exact; an unescaped "." would match any byte.
+	write_setup_file(
+		html5 => 0,
+		url => "http://example.com/wiki/",
+		cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
+	);
+	thoroughly_rebuild();
+	check_cgi_mode_bits();
+	# url and cgiurl are on the same host but different schemes
+	check_generated_content(qr{<a[^>]+href="https://example.com/cgi-bin/ikiwiki.cgi\?do=prefs"});
+
+	# when accessed via HTTPS, links are secure (to avoid mixed-content)
+	my %bits = parse_cgi_content(run_cgi(is_https => 1));
+	is($bits{basehref}, "https://example.com/wiki/");
+	like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+	like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+	# when not accessed via HTTPS, ???
+	%bits = parse_cgi_content(run_cgi());
+	like($bits{basehref}, qr{^https?://example.com/wiki/$});
+	like($bits{stylehref}, qr{^(?:(?:https?:)?//example.com)?/wiki/style.css$});
+	like($bits{tophref}, qr{^(?:(?:https?://example.com)?/wiki|\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+	# when accessed via a different hostname, links stay on that host
+	%bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net'));
+	# because the static and dynamic stuff is on the same server, we assume that
+	# both are also on the staging server
+	like($bits{basehref}, qr{^https://staging\.example\.net/wiki/$});
+	like($bits{stylehref}, qr{^(?:(?:https:)?//staging\.example\.net)?/wiki/style.css$});
+	like($bits{tophref}, qr{^(?:(?:(?:https:)?//staging\.example\.net)?/wiki|\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//(?:staging\.example\.net|example\.com))?/cgi-bin/ikiwiki.cgi$});
+	TODO: {
+		local $TODO = "this should really point back to itself but currently points to example.com";
+		like($bits{cgihref}, qr{^(?:(?:https:)?//staging\.example\.net)?/cgi-bin/ikiwiki.cgi$});
+	}
+
+	# previewing a page
+	%bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1));
+	is($bits{basehref}, "https://example.com/wiki/a/b/c/");
+	like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+	like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+	# Repeat with html5 => 1: relative URLs become available, so most
+	# links can be host-relative instead of absolute.
+	write_setup_file(
+		html5 => 1,
+		url => "http://example.com/wiki/",
+		cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
+	);
+	thoroughly_rebuild();
+	check_cgi_mode_bits();
+	# url and cgiurl are on the same host but different schemes
+	check_generated_content(qr{<a[^>]+href="https://example.com/cgi-bin/ikiwiki.cgi\?do=prefs"});
+
+	# when accessed via HTTPS, links are secure (to avoid mixed-content)
+	%bits = parse_cgi_content(run_cgi(is_https => 1));
+	is($bits{basehref}, "/wiki/");
+	is($bits{stylehref}, "/wiki/style.css");
+	is($bits{tophref}, "/wiki/");
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+	# when not accessed via HTTPS, ???
+	%bits = parse_cgi_content(run_cgi());
+	like($bits{basehref}, qr{^(?:https?://example.com)?/wiki/$});
+	like($bits{stylehref}, qr{^(?:(?:https?:)?//example.com)?/wiki/style.css$});
+	like($bits{tophref}, qr{^(?:(?:https?://example.com)?/wiki|\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+	# when accessed via a different hostname, links stay on that host
+	%bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net'));
+	# because the static and dynamic stuff is on the same server, we assume that
+	# both are also on the staging server
+	is($bits{basehref}, "/wiki/");
+	is($bits{stylehref}, "/wiki/style.css");
+	like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//(?:example\.com|staging\.example\.net))?/cgi-bin/ikiwiki.cgi$});
+	TODO: {
+		local $TODO = "this should really point back to itself but currently points to example.com";
+		like($bits{cgihref}, qr{^(?:(?:https:)?//staging\.example\.net)?/cgi-bin/ikiwiki.cgi$});
+	}
+
+	# previewing a page
+	%bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1));
+	is($bits{basehref}, "/wiki/a/b/c/");
+	is($bits{stylehref}, "/wiki/style.css");
+	like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+
+	# Deliberately not testing https static content with http cgiurl,
+	# because that makes remarkably little sense.
+}
+
+sub test_site5_w3mmode {
+	# as documented in [[w3mmode]]
+	# Scenario: no url configured at all; the CGI is run locally by w3m
+	# via the /$LIB/ikiwiki-w3m.cgi wrapper, so links must resolve
+	# against the local filesystem (file:// or absolute paths).
+	write_setup_file(
+		html5 => 0,
+		url => undef,
+		cgiurl => "ikiwiki.cgi",
+		w3mmode => 1,
+	);
+	thoroughly_rebuild();
+	check_cgi_mode_bits();
+	# FIXME: does /$LIB/ikiwiki-w3m.cgi work under w3m?
+	check_generated_content(qr{<a[^>]+href="(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi\?do=prefs"});
+
+	# The built wiki lives under $pwd/t/tmp/out, so hrefs are expected
+	# to point into that directory (or be relative to it).
+	my %bits = parse_cgi_content(run_cgi(PATH_INFO => '/ikiwiki.cgi', SCRIPT_NAME => '/cgi-bin/ikiwiki-w3m.cgi'));
+	my $pwd = getcwd();
+	like($bits{tophref}, qr{^(?:\Q$pwd\E/t/tmp/out|\.)/$});
+	like($bits{cgihref}, qr{^(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi$});
+	like($bits{basehref}, qr{^(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out/$});
+	like($bits{stylehref}, qr{^(?:(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out|\.)/style.css$});
+
+	# Repeat with html5 => 1; w3mmode output is expected to be identical.
+	write_setup_file(
+		html5 => 1,
+		url => undef,
+		cgiurl => "ikiwiki.cgi",
+		w3mmode => 1,
+	);
+	thoroughly_rebuild();
+	check_cgi_mode_bits();
+	# FIXME: does /$LIB/ikiwiki-w3m.cgi work under w3m?
+	check_generated_content(qr{<a[^>]+href="(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi\?do=prefs"});
+
+	%bits = parse_cgi_content(run_cgi(PATH_INFO => '/ikiwiki.cgi', SCRIPT_NAME => '/cgi-bin/ikiwiki-w3m.cgi'));
+	like($bits{tophref}, qr{^(?:\Q$pwd\E/t/tmp/out|\.)/$});
+	like($bits{cgihref}, qr{^(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi$});
+	like($bits{basehref}, qr{^(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out/$});
+	like($bits{stylehref}, qr{^(?:(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out|\.)/style.css$});
+}
+
+sub test_site6_behind_reverse_proxy {
+	# Scenario: ikiwiki runs behind a reverse proxy, so the hostname the
+	# CGI sees (e.g. "localhost") is not the public one. With
+	# reverse_proxy => 1 the CGI must ignore the request's own URL and
+	# trust the configured url/cgiurl instead.
+	write_setup_file(
+		html5 => 0,
+		url => "https://example.com/wiki/",
+		cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
+		reverse_proxy => 1,
+	);
+	thoroughly_rebuild();
+	check_cgi_mode_bits();
+	# url and cgiurl are on the same host so the cgiurl is host-relative
+	check_generated_content(qr{<a[^>]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"});
+
+	# because we are behind a reverse-proxy we must assume that
+	# we're being accessed by the configured cgiurl
+	# (HTTP_HOST => 'localhost' simulates the proxy's internal request)
+	my %bits = parse_cgi_content(run_cgi(HTTP_HOST => 'localhost'));
+	like($bits{tophref}, qr{^(?:/wiki|\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+	is($bits{basehref}, "https://example.com/wiki/");
+	like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+
+	# previewing a page
+	%bits = parse_cgi_content(run_cgi(is_preview => 1, HTTP_HOST => 'localhost'));
+	like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
+	like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
+	is($bits{basehref}, "https://example.com/wiki/a/b/c/");
+	like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
+
+	# not testing html5: 1 because it would be the same as site 1 -
+	# the reverse_proxy config option is unnecessary under html5
+}
+
+test_startup();
+
+# One scenario per simulated site layout; test_startup() must run first
+# to set up the shared fixtures. See each sub for the layout it covers.
+test_site1_perfectly_ordinary_ikiwiki();
+test_site2_static_content_and_cgi_on_different_servers();
+test_site3_we_specifically_want_everything_to_be_secure();
+test_site4_cgi_is_secure_static_content_doesnt_have_to_be();
+test_site5_w3mmode();
+test_site6_behind_reverse_proxy();
+
+done_testing();
#!/usr/bin/perl
use warnings;
use strict;
-use Test::More tests => 26;
+use Test::More tests => 31;
+# (plan grew by 5: checkconfig plus 4 URL assertions for the
+# mixed-scheme case added below)
BEGIN { use_ok("IkiWiki"); }
is(IkiWiki::cgiurl(cgiurl => 'https://foo/ikiwiki'), "https://foo/ikiwiki");
is(IkiWiki::cgiurl(do => 'badger', cgiurl => 'https://foo/ikiwiki'), "https://foo/ikiwiki?do=badger");
-# with url and cgiurl on different sites, "local" degrades to absolute
+# with url and cgiurl on different sites, "local" degrades to protocol-relative
$IkiWiki::config{url} = "http://example.co.uk/~smcv";
$IkiWiki::config{cgiurl} = "http://dynamic.example.co.uk/~smcv/ikiwiki.cgi";
is(IkiWiki::checkconfig(), 1);
-is(IkiWiki::cgiurl(), "http://dynamic.example.co.uk/~smcv/ikiwiki.cgi");
-is(IkiWiki::baseurl(undef), "http://example.co.uk/~smcv/");
-is(IkiWiki::urlto('stoats', undef), "http://example.co.uk/~smcv/stoats/");
-is(IkiWiki::urlto('', undef), "http://example.co.uk/~smcv/");
+is(IkiWiki::cgiurl(), "//dynamic.example.co.uk/~smcv/ikiwiki.cgi");
+is(IkiWiki::baseurl(undef), "//example.co.uk/~smcv/");
+is(IkiWiki::urlto('stoats', undef), "//example.co.uk/~smcv/stoats/");
+is(IkiWiki::urlto('', undef), "//example.co.uk/~smcv/");
+
+# with url and cgiurl on different schemes, "local" degrades to absolute for
+# CGI but protocol-relative for static content, to avoid the CGI having
+# mixed content
+$IkiWiki::config{url} = "http://example.co.uk/~smcv";
+$IkiWiki::config{cgiurl} = "https://dynamic.example.co.uk/~smcv/ikiwiki.cgi";
+is(IkiWiki::checkconfig(), 1);
+is(IkiWiki::cgiurl(), "https://dynamic.example.co.uk/~smcv/ikiwiki.cgi");
+is(IkiWiki::baseurl(undef), "//example.co.uk/~smcv/");
+is(IkiWiki::urlto('stoats', undef), "//example.co.uk/~smcv/stoats/");
+is(IkiWiki::urlto('', undef), "//example.co.uk/~smcv/");
-<form method="get" action="http://www.google.com/search" id="searchform">
+<form method="get" action="https://www.google.com/search" id="searchform">
<div>
<input name="sitesearch" value="<TMPL_VAR URL>" type="hidden" />
<input name="q" value="" id="searchbox" size="16" maxlength="255" type="text"
<div id="pagebody">
-<TMPL_IF HTML5><section id="content"><TMPL_ELSE><div id="content"></TMPL_IF>
+<TMPL_IF HTML5><section id="content" role="main"><TMPL_ELSE><div id="content"></TMPL_IF>
<TMPL_VAR CONTENT>
<TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
<TMPL_UNLESS DYNAMIC>
<TMPL_IF COMMENTS>
-<TMPL_IF HTML5><section id="comments"><TMPL_ELSE><div id="comments"></TMPL_IF>
+<TMPL_IF HTML5><section id="comments" role="complementary"><TMPL_ELSE><div id="comments"></TMPL_IF>
<TMPL_VAR COMMENTS>
<TMPL_IF ADDCOMMENTURL>
<div class="addcomment">
</div>
-<TMPL_IF HTML5><footer id="footer" class="pagefooter"><TMPL_ELSE><div id="footer" class="pagefooter"></TMPL_IF>
+<TMPL_IF HTML5><footer id="footer" class="pagefooter" role="contentinfo"><TMPL_ELSE><div id="footer" class="pagefooter"></TMPL_IF>
<TMPL_UNLESS DYNAMIC>
<TMPL_IF HTML5><nav id="pageinfo"><TMPL_ELSE><div id="pageinfo"></TMPL_IF>