-ikiwiki.setup
-Makefile
+/ikiwiki.setup
+/Makefile
Makefile.old
blib/*
/cover_db
po/underlays_copy_stamp
underlays/locale
/t/tmp/
+/t/manual/*/git/
+/t/manual/*/html/
my $template=template("page.tmpl");
- my $topurl = $config{url};
- if (defined $cgi && ! $config{w3mmode} && ! $config{reverse_proxy}) {
- $topurl = $cgi->url;
- }
-
my $page="";
if (exists $params{page}) {
$page=delete $params{page};
# Generate the attachment list only after having added any new
# attachments.
- $form->tmpl_param("attachment_list" => [attachment_list($form->field('page'))]);
+ $form->tmpl_param("attachment_list" => [attachment_list(scalar $form->field('page'))]);
}
sub attachment_holding_location {
}
$postcomment=1;
- my $ok=IkiWiki::check_content(content => $form->field('editcontent'),
- subject => $form->field('subject'),
+ my $ok=IkiWiki::check_content(
+ content => scalar $form->field('editcontent'),
+ subject => scalar $form->field('subject'),
$config{comments_allowauthor} ? (
- author => $form->field('author'),
- url => $form->field('url'),
+ author => scalar $form->field('author'),
+ url => scalar $form->field('url'),
) : (),
page => $location,
cgi => $cgi,
length $form->field('subject')) {
$message = sprintf(
gettext("Added a comment: %s"),
- $form->field('subject'));
+ scalar $form->field('subject'));
}
IkiWiki::rcs_add($file);
#!/usr/bin/perl
package IkiWiki::Plugin::cvs;
-# Copyright (c) 2009 Amitai Schlair
+# Copyright (c) 2009 Amitai Schleier
# All rights reserved.
#
# This code is derived from software contributed to ikiwiki
-# by Amitai Schlair.
+# by Amitai Schleier.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
$conflict=rcs_commit(
file => $file,
message => $message,
- token => $form->field("rcsinfo"),
+ token => scalar $form->field("rcsinfo"),
session => $session,
);
enable_commit_hook();
use strict;
use IkiWiki;
use Encode;
+use File::Path qw{remove_tree};
use URI::Escape q{uri_escape_utf8};
use open qw{:utf8 :std};
}
}
-my $git_dir=undef;
-my $prefix=undef;
+my @git_dir_stack;
+my $prefix;
sub in_git_dir ($$) {
- $git_dir=shift;
+ unshift @git_dir_stack, shift;
my @ret=shift->();
- $git_dir=undef;
+ shift @git_dir_stack;
$prefix=undef;
return @ret;
}
-sub safe_git (&@) {
+# Loosely based on git-new-workdir from git contrib.
+sub create_temp_working_dir ($$) {
+	my $rootdir = shift;
+	my $branch = shift;
+	my $working = "$rootdir/.git/ikiwiki-temp-working";
+	remove_tree($working);
+
+	foreach my $dir ("", ".git") {
+		if (!mkdir("$working/$dir")) {
+			error("Unable to create $working/$dir: $!");
+		}
+	}
+
+	# Hooks are deliberately not included: we will commit to the temporary
+	# branch that is used in the temporary working tree, and we don't want
+	# to run the post-commit hook there.
+	#
+	# logs/refs is not included because we don't use the reflog.
+	# remotes, rr-cache, svn are similarly excluded.
+	foreach my $link ("config", "refs", "objects", "info", "packed-refs") {
+		if (!symlink("../../$link", "$working/.git/$link")) {
+			error("Unable to create symlink $working/.git/$link: $!");
+		}
+	}
+
+	open (my $out, '>', "$working/.git/HEAD") or
+		error("failed to write $working/.git/HEAD: $!");
+	print $out "ref: refs/heads/$branch\n" or
+		error("failed to write $working/.git/HEAD: $!");
+	close $out or
+		error("failed to write $working/.git/HEAD: $!");
+	return $working;
+}
+
+sub safe_git {
# Start a child process safely without resorting to /bin/sh.
# Returns command output (in list content) or success state
# (in scalar context), or runs the specified data handler.
- my ($error_handler, $data_handler, @cmdline) = @_;
+ my %params = @_;
my $pid = open my $OUT, "-|";
if (!$pid) {
# In child.
# Git commands want to be in wc.
- if (! defined $git_dir) {
+ if (! @git_dir_stack) {
chdir $config{srcdir}
or error("cannot chdir to $config{srcdir}: $!");
}
else {
- chdir $git_dir
- or error("cannot chdir to $git_dir: $!");
+ chdir $git_dir_stack[0]
+ or error("cannot chdir to $git_dir_stack[0]: $!");
}
- exec @cmdline or error("Cannot exec '@cmdline': $!");
+
+ if ($params{stdout}) {
+ open(STDOUT, '>&', $params{stdout}) or error("Cannot reopen stdout: $!");
+ }
+
+ exec @{$params{cmdline}} or error("Cannot exec '@{$params{cmdline}}': $!");
}
# In parent.
chomp;
- if (! defined $data_handler) {
+ if (! defined $params{data_handler}) {
push @lines, $_;
}
else {
- last unless $data_handler->($_);
+ last unless $params{data_handler}->($_);
}
}
close $OUT;
- $error_handler->("'@cmdline' failed: $!") if $? && $error_handler;
+ $params{error_handler}->("'@{$params{cmdline}}' failed: $!") if $? && $params{error_handler};
return wantarray ? @lines : ($? == 0);
}
# Convenient wrappers.
-sub run_or_die ($@) { safe_git(\&error, undef, @_) }
-sub run_or_cry ($@) { safe_git(sub { warn @_ }, undef, @_) }
-sub run_or_non ($@) { safe_git(undef, undef, @_) }
+sub run_or_die ($@) { safe_git(error_handler => \&error, cmdline => \@_) }
+sub run_or_cry ($@) { safe_git(error_handler => sub { warn @_ }, cmdline => \@_) }
+sub run_or_non ($@) { safe_git(cmdline => \@_) }
sub ensure_committer {
if (! length $ENV{GIT_AUTHOR_NAME} || ! length $ENV{GIT_COMMITTER_NAME}) {
}
shift @{ $dt_ref } if $dt_ref->[0] =~ /^$/;
+ $ci{details} = [parse_changed_files($dt_ref)];
+
+ return \%ci;
+}
+
+sub parse_changed_files {
+ my $dt_ref = shift;
+
+ my @files;
+
# Modified files.
while (my $line = shift @{ $dt_ref }) {
if ($line =~ m{^
my $status = shift(@tmp);
if (length $file) {
- push @{ $ci{'details'} }, {
+ push @files, {
'file' => decode_git_file($file),
'sha1_from' => $sha1_from[0],
'sha1_to' => $sha1_to,
last;
}
- return \%ci;
+ return @files;
}
sub git_commit_info ($;$) {
# Check to see if the page has been changed by someone else since
# rcs_prepedit was called.
my $cur = git_sha1_file($params{file});
- my ($prev) = $params{token} =~ /^($sha1_pattern)$/; # untaint
+ my $prev;
+ if (defined $params{token}) {
+ ($prev) = $params{token} =~ /^($sha1_pattern)$/; # untaint
+ }
if (defined $cur && defined $prev && $cur ne $prev) {
my $conflict = merge_past($prev, $params{file}, $dummy_commit_msg);
elsif (defined $params{session}->remote_addr()) {
$u=$params{session}->remote_addr();
}
- if (defined $u) {
+ if (length $u) {
$u=encode_utf8(IkiWiki::cloak($u));
$ENV{GIT_AUTHOR_NAME}=$u;
}
+ else {
+ $u = 'anonymous';
+ }
if (defined $params{session}->param("nickname")) {
$u=encode_utf8($params{session}->param("nickname"));
$u=~s/\s+/_/g;
$u=~s/[^-_0-9[:alnum:]]+//g;
}
- if (defined $u) {
+ if (length $u) {
$ENV{GIT_AUTHOR_EMAIL}="$u\@web";
}
+ else {
+ $ENV{GIT_AUTHOR_EMAIL}='anonymous@web';
+ }
}
ensure_committer();
if (@lines || $line=~/^diff --git/);
return 1;
};
- safe_git(undef, $addlines, "git", "show", $sha1);
+ safe_git(
+ error_handler => undef,
+ data_handler => $addlines,
+ cmdline => ["git", "show", $sha1],
+ );
if (wantarray) {
return @lines;
}
die $@ if $@;
my $fh;
($fh, $path)=File::Temp::tempfile(undef, UNLINK => 1);
- my $cmd = "cd $git_dir && ".
- "git show $detail->{sha1_to} > '$path'";
- if (system($cmd) != 0) {
- error("failed writing temp file '$path'.");
- }
+ safe_git(
+ error_handler => sub { error("failed writing temp file '$path': ".shift."."); },
+ stdout => $fh,
+ cmdline => ['git', 'show', $detail->{sha1_to}],
+ );
}
push @rets, {
my $rev=shift;
my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint
+ my @undo; # undo stack for cleanup in case of an error
+
+ ensure_committer();
+
# Examine changes from root of git repo, not from any subdir,
# in order to see all changes.
my ($subdir, $rootdir) = git_find_root();
- in_git_dir($rootdir, sub {
+ return in_git_dir($rootdir, sub {
my @commits=git_commit_info($sha1, 1);
if (! @commits) {
error gettext("you are not allowed to revert a merge");
}
+ # Due to the presence of rename-detection, we cannot actually
+ # see what will happen in a revert without trying it.
+ # But we can guess, which is enough to rule out most changes
+ # that we won't allow reverting.
git_parse_changes(1, @commits);
+
+ my $failure;
+ my @ret;
+ eval {
+ IkiWiki::disable_commit_hook();
+ push @undo, sub {
+ IkiWiki::enable_commit_hook();
+ };
+ my $branch = "ikiwiki_revert_${sha1}"; # supposed to be unique
+
+ push @undo, sub {
+ run_or_cry('git', 'branch', '-D', $branch) if $failure;
+ };
+ if (run_or_non('git', 'rev-parse', '--quiet', '--verify', $branch)) {
+ run_or_non('git', 'branch', '-D', $branch);
+ }
+ run_or_die('git', 'branch', $branch, $config{gitmaster_branch});
+
+ my $working = create_temp_working_dir($rootdir, $branch);
+
+ push @undo, sub {
+ remove_tree($working);
+ };
+
+ in_git_dir($working, sub {
+ run_or_die('git', 'checkout', '--quiet', '--force', $branch);
+ run_or_die('git', 'revert', '--no-commit', $sha1);
+ run_or_die('git', 'commit', '-m', "revert $sha1", '-a');
+ });
+
+ my @raw_lines;
+ @raw_lines = run_or_die('git', 'diff', '--pretty=raw',
+ '--raw', '--abbrev=40', '--always', '--no-renames',
+ "..${branch}");
+
+ my $ci = {
+ details => [parse_changed_files(\@raw_lines)],
+ };
+
+ @ret = git_parse_changes(0, $ci);
+ };
+ $failure = $@;
+
+ # Process undo stack (in reverse order). By policy cleanup
+ # actions should normally print a warning on failure.
+ while (my $handle = pop @undo) {
+ $handle->();
+ }
+
+ if ($failure) {
+ my $message = sprintf(gettext("Failed to revert commit %s"), $sha1);
+ error("$message\n$failure\n");
+ }
+
+ return @ret;
});
}
ensure_committer();
- if (run_or_non('git', 'revert', '--no-commit', $sha1)) {
+ if (run_or_non('git', 'merge', '--ff-only', "ikiwiki_revert_$sha1")) {
return undef;
}
else {
- run_or_die('git', 'reset', '--hard');
+ run_or_non('git', 'branch', '-D', "ikiwiki_revert_$sha1");
return sprintf(gettext("Failed to revert commit %s"), $sha1);
}
}
if ($q->param('do') eq 'blog') {
my $page=titlepage(decode_utf8(scalar $q->param('title')));
$page=~s/(\/)/"__".ord($1)."__"/eg; # don't create subdirs
+ if (! length $page) {
+ error(gettext("please enter a page title"));
+ }
# if the page already exists, munge it to be unique
my $from=$q->param('from');
my $add="";
}
elsif ($form->submitted eq "Save Preferences" && $form->validate &&
defined $form->field("subscriptions")) {
- setsubscriptions($username, $form->field('subscriptions'));
+ setsubscriptions($username, scalar $form->field('subscriptions'));
}
}
if (exists $params{limit}) {
my $i=0;
my %show;
- foreach my $key (sort { $counts{$b} <=> $counts{$a} } keys %counts) {
+ foreach my $key (sort { $counts{$b} <=> $counts{$a} || $a cmp $b } keys %counts) {
last if ++$i > $params{limit};
$show{$key}=$counts{$key};
}
htmllink($params{page}, $params{destpage}, $_, noimageinline => 1, linktext => linktext($_, %params)).
"</td><td>".$counts{$_}."</td></tr>"
}
- sort { $counts{$b} <=> $counts{$a} } keys %counts).
+ sort { $counts{$b} <=> $counts{$a} || $a cmp $b } keys %counts).
"\n</table>\n" ;
}
else {
$form->field(
name => "password",
validate => sub {
- checkpassword($form->field("name"), shift);
+ checkpassword(scalar $form->field("name"), shift);
},
);
}
if ($form->submitted eq "Save Preferences" && $form->validate) {
my $user_name=$form->field('name');
if (defined $form->field("password") && length $form->field("password")) {
- setpassword($user_name, $form->field('password'));
+ setpassword($user_name, scalar $form->field('password'));
}
}
}
# their buttons, which is why this hook must be run last.
# The canrename/canremove hooks already ensure this is forbidden
# at the backend level, so this is only UI sugar.
- if (istranslation($form->field("page"))) {
+ if (istranslation(scalar $form->field("page"))) {
map {
for (my $i = 0; $i < @{$params{buttons}}; $i++) {
if (@{$params{buttons}}[$i] eq $_) {
my $session=$params{session};
if ($form->submitted eq "Rename" && $form->field("do") eq "edit") {
- rename_start($q, $session, 0, $form->field("page"));
+ rename_start($q, $session, 0, scalar $form->field("page"));
}
elsif ($form->submitted eq "Rename Attachment") {
my @selected=map { Encode::decode_utf8($_) } $q->param("attachment_select");
# performed in check_canrename later.
my $srcfile=IkiWiki::possibly_foolish_untaint($pagesources{$src})
if exists $pagesources{$src};
- my $dest=IkiWiki::possibly_foolish_untaint(titlepage($form->field("new_name")));
+ my $dest=IkiWiki::possibly_foolish_untaint(titlepage(scalar $form->field("new_name")));
my $destfile=$dest;
if (! $q->param("attachment")) {
my $type=$q->param('type');
+ikiwiki (3.20161229) unstable; urgency=medium
+
+ * Security: force CGI::FormBuilder->field to scalar context where
+ necessary, avoiding unintended function argument injection
+ analogous to CVE-2014-1572. In ikiwiki this could be used to
+ forge commit metadata, but thankfully nothing more serious.
+ (CVE-2016-9646)
+ * Security: try revert operations in a temporary working tree before
+ approving them. Previously, automatic rename detection could result in
+ a revert writing outside the wiki srcdir or altering a file that the
+ reverting user should not be able to alter, an authorization bypass.
+ (CVE-2016-10026 represents the original vulnerability.)
+ The incomplete fix released in 3.20161219 was not effective for git
+ versions prior to 2.8.0rc0.
+ (CVE-2016-9645 represents that incomplete solution.)
+ * Add CVE references for CVE-2016-10026
+ * Add automated test for using the CGI with git, including
+ CVE-2016-10026
+ - Build-depend on libipc-run-perl for better build-time test coverage
+ * Add missing ikiwiki.setup for the manual test for CVE-2016-10026
+ * git: don't issue a warning if the rcsinfo CGI parameter is undefined
+ * git: do not fail to commit changes with a recent git version
+ and an anonymous committer
+
+ -- Simon McVittie <smcv@debian.org> Thu, 29 Dec 2016 17:36:15 +0000
+
+ikiwiki (3.20161219) unstable; urgency=medium
+
+ [ Joey Hess ]
+ * inline: Prevent creating a file named ".mdwn" when the
+ postform is submitted with an empty title.
+
+ [ Simon McVittie ]
+ * Security: tell `git revert` not to follow renames. If it does, then
+ renaming a file can result in a revert writing outside the wiki srcdir
+ or altering a file that the reverting user should not be able to alter,
+ an authorization bypass. Thanks, intrigeri. (CVE-2016-10026)
+ * cgitemplate: remove some dead code. Thanks, blipvert
+ * Restrict CSS matches against header class to not break
+ Pandoc tables with header rows. Thanks, karsk
+ * Make pagestats output more deterministic. Thanks, intrigeri
+
+ -- Simon McVittie <smcv@debian.org> Mon, 19 Dec 2016 20:34:23 +0000
+
ikiwiki (3.20160905~bpo8+1) jessie-backports; urgency=medium
* Rebuild for jessie-backports
libhtml-parser-perl,
libhtml-scrubber-perl,
libhtml-template-perl,
+ libipc-run-perl,
libimage-magick-perl | perlmagick,
libmagickcore-extra,
libnet-openid-consumer-perl,
License: GPL-2+
Files: IkiWiki/Plugin/cvs.pm
-Copyright: © 2009 Amitai Schlair <http://www.schmonz.com/>
+Copyright: © 2009 Amitai Schleier <http://www.schmonz.com/>
License: BSD-2-clause
Files: IkiWiki/Plugin/rsync.pm
-Copyright: © 2009 Amitai Schlair <http://www.schmonz.com/>
+Copyright: © 2009 Amitai Schleier <http://www.schmonz.com/>
License: BSD-2-clause
Files: IkiWiki/Plugin/osm.pm
Also see the [Debian bugs](http://bugs.debian.org/ikiwiki).
+If you are reporting a security vulnerability, please email the maintainers
+privately, instead of making it public by listing it here. See [[security]]
+for contact details.
+
There are [[!pagecount pages="bugs/* and !bugs/done and !bugs/discussion and
!link(patch) and !link(bugs/done) and !bugs/*/*"
feedpages="created_after(bugs/no_commit_mails_for_new_pages)"]] "open" bugs:
-# Bug Descriptipn
+# Bug Description
If color and toc plugins are enabled and you use colored headers, those headers are never colored but sometimes are prefixed with text artifacts like "color: red".
There are a few possible solutions to this depending on how it should work:
1. The easiest thing would be to just add a "last" parameter to the toc plugin format hook (or "first" to the color plugin). Result: No color in tocs at all
-1. Adding four (**EDIT** seven) lines to toc.pm (and possibly removing ~4 now superfluous lines **EDIT** No) would make it preserve ALL markup in headers, color as well as html markup or markdown (*emphasize* for example). Execution order of the plugins would not matter at all
+1. Adding seven lines to toc.pm would make it preserve ALL markup in headers, color as well as html markup or markdown (*emphasize* for example). Execution order of the plugins would not matter at all
1. A bit more code would be necessary to just specifically preserve the color, but nothing else
I would propose implementing the second option because visual markers in headers are useful to convey additional information very fast and this information should be preserved in the toc. Example: Bug or task/project tracker with color conveying status of the bug or task.
-It seems you can stuff anything into ordered lists (according to w3.orgs doku), so apart from stylistic reasons I don't see any problems with markup in the toc.
+It seems you can stuff anything into ordered lists (according to w3.org's documentation), so apart from stylistic reasons and suboptimal display of links in headers (see below) I don't see any problems with markup in the toc.
# Patch
-This is the proposed patch to the second solution. I did not yet test it with the latest version of ikiwiki, but I did check that both plugins are identical in my test versions and the latest. I will update my wikis in use to the latest version and test it further, anyway.
+This is the proposed patch to the second solution. Tested with the latest version. It works with all markup and markdown I could think of. The only case not handled optimally is if the header is just a link and nothing else: then there is no text left for the local link, and the toc links directly to a different page. Is that acceptable or not?
-The part that could probably be removed in toc is the handler call "$p->handler(text => sub {" in line 110. It collects all text in the header as HTML::Parser "dtext", which means entities are decoded in the text. Since that step is probably already done in ikiwiki or doesn't need to be done (otherwise ikiwiki with toc.pm disabled would not work correctly) I'm pretty sure the "dtext" is not necessary. And in that case the patch below would just collect that text in the default handler. Not tested at all, I want to hear a second opinion first.
-**EDIT** Ok, the handler call is still necessary, but the "dtext" could be changed to "text". Also I needed to add 3 more lines, the patch below is up to date. It works with all markup and markdown I could think of. The only case not handled optimal is if the header is just a link and nothing else, then there is no text left for the local link, the toc links directly to a different page. Is that acceptable or not?
-
-(Should I upload this patch as a branch to ikiwiki.info? Not sure about how patch submission works here)
diff --git a/IkiWiki/Plugin/toc.pm b/IkiWiki/Plugin/toc.pm
index ac07b9a..5c2b056 100644
}
}, "tagname, text");
$p->handler(default => sub { $page.=join("", @_) }, "text");
- --
- 1.8.4.5
+
+
+[[!tag patch]]
--- /dev/null
+When using inline with postform=yes, the user can click on the edit button
+without providing a title, and are allowed to save the page. This results
+in a file with a name like ".mdwn", which ikiwiki won't render.
+--[[Joey]]
+
+> [[done]]; made it error out in this case.
--- /dev/null
+Hi! While working on Reproducible Builds for Tails, we noticed that
+the pagestats plugin's output is not deterministic: pages that have
+the same number of hits (counts) are sorted in hash order.
+
+The `pagestats-determinism` branch in the
+<https://git-tails.immerda.ch/ikiwiki.git> Git repository has a fix
+for this problem.
+
+--[[intrigeri]]
+
+[[!tag patch]]
--- /dev/null
+1. We have a `$srcdir/writable/page.mdwn` source file in Git.
+2. ikiwiki is configured to allow edits via the CGI in `writable/*`,
+ but nowhere else.
+2. Modify `$srcdir/writable/page.mdwn`, commit ⇒ commit `$id`.
+3. `git mv $srcdir/writable/page.mdwn $srcdir/read-only/page.mdwn`
+
+⇒ The web interface allows reverting commit `$id` (presumably because
+it changes files only in `$srcdir/writable`). This operation
+effectively modifies `$srcdir/read-only/page.mdwn`, which feels wrong.
+My guess is that `check_canchange` does not take into account that Git
+will automatically detect that the file affected by the to-be-reverted
+commit has moved, and modify the file in its new location
+when reverting.
+
+> Working on it. In future please report non-public security
+> vulnerabilities (such as authorization bypass) by private email to the
+> maintainers, so that they are not visible to the general public
+> until we have had a chance to fix the bug. --[[smcv]]
+
+>> Sorry about that, I should clearly know better :/ --[[intrigeri]]
+
+> Fixed by using
+> `git revert --strategy=recursive --strategy-option=no-renames`.
+> I tried to do something more clever (doing the revert, and checking
+> whether it made changes that aren't allowed) but couldn't get it to
+> work in a reasonable time, so I'm going with the simpler fix.
+> [[Fix committed|done]], a release will follow later today.
+>
+> [[!cve CVE-2016-10026]] has been assigned to this vulnerability.
+> --[[smcv]]
+
+>> You rock, thanks a lot! --[[intrigeri]]
--- /dev/null
+When I run `ikiwiki` with the ``--rebuild`` option (or only with the `--setup file.setup` option) a map directive like `\[[!map pages="*" show=title]]` generates a page map as if it didn't contain any `show` parameter. Only after I manually edit something which causes the page containing the map directive to be rebuilt is the page map regenerated without ignoring the `show` parameter.
--- /dev/null
+This may, strictly speaking, be a bug in the [[plugins/contrib/pandoc]] plugin, but I think it would be better to fix it in ikiwiki because of its kind (and maybe because I believe/hope pandoc will become the markdown dialect standard). For all I know it might not only affect pandoc tables.
+
+When creating a simple table in pandoc-flavoured markdown,
+
+ 1 2
+ --- ---
+ 3 4
+
+pandoc converts this to the html code
+
+ <table>
+ <thead>
+ <tr class="header">
+ <th align="left">1</th>
+ <th align="left">2</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr class="odd">
+ <td align="left">3</td>
+ <td align="left">4</td>
+ </tr>
+ </tbody>
+ </table>
+
+`<tr class="header">` causes it to be affected by `style.css`'s
+
+ .header {
+ margin: 0;
+ font-size: 140%;
+ font-weight: bold;
+ line-height: 1em;
+ display: block;
+ }
+
+(more specifically by `display: block;`), which results in all header cells cramping together in the first column.
+
+The fix is easy: In `style.css` change `.header {` to `.header tr:not(.header) {`.
+
+Alternatively, add the following code.
+
+ tr.header {
+ display: table-row;
+ }
+
+I've added that last code snippet to my `custom.css` file. I admit `.header tr:not(.header)` is not especially elegant, but then again, I have almost no knowledge of CSS. There might be better solutions. (I don't even know why `display: block;` breaks the tables or why changing it to `display: table-header;` doesn't fix it but `display: table-row;` does :D )
+
+> This is essentially a conflict between ikiwiki's expectations for the
+> definitions of CSS classes, and pandoc's expectations. The ikiwiki
+> templates use `class="header"` to mean essentially the same thing
+> as a HTML5 `<header>`, while Pandoc assumes a different meaning.
+>
+> I think `div.header, header.header {` is probably a cleaner fix,
+> and I have [[done]] that.
+>
+> FYI, `display: block` breaks the tables because it makes the `<tr>` not
+> be treated as a table row by the browser's layout engine.
+> `table-header` is not a valid
+> [value for the CSS `display` attribute](https://developer.mozilla.org/en-US/docs/Web/CSS/display)
+> so that won't work.
+>
+> --[[smcv]]
--- /dev/null
+In commits by Simon McVittie on Oct 5, 2014, the following was added to `cgitemplate()`:
+
+ b0a35c81 (Simon McVittie 2014-10-05 61) my $topurl = $config{url};
+ 3b8da667 (Simon McVittie 2014-10-05 62) if (defined $cgi && ! $config{w3mmode} && ! $config{reverse_proxy}) {
+ b0a35c81 (Simon McVittie 2014-10-05 63) $topurl = $cgi->url;
+ b0a35c81 (Simon McVittie 2014-10-05 64) }
+
+I am trying to determine what was intended by this change. The variable `$topurl` is not used again in this function, so this is essentially dead code.
+--[[blipvert]]
+
+> If you look at `git log -p IkiWiki/CGI.pm` you'll see that *at the time*, `$topurl`
+> was used further down the function. Later in the branch, [commit 33f6026
+"In html5 mode, generate a host- or protocol-relative <base> for the
+CGI"](http://source.ikiwiki.branchable.com/?p=source.git;a=commit;h=33f60260b233d0310ce6dd4304304a516595b906)
+> made this conditional on `! $config{html5}`.
+>
+> Somewhat later,
+> [commit 490a1ec
+"Always produce HTML5 doctype and new attributes, but not new
+elements"](http://source.ikiwiki.branchable.com/?p=source.git;a=commit;h=490a1eca7bed841848765b495a73fbc56e4808f4)
+> repurposed `$config{html5}` from "use HTML5" to "use new HTML5 elements" -
+> which meant that [commit a052771
+"Now that we're always using HTML5, <base href> can be
+relative"](http://source.ikiwiki.branchable.com/?p=source.git;a=commit;h=a05277128732beb351aa696c49d337086414ffb6)
+> could remove the only code that used `$topurl`.
+>
+> You are correct to say that computing `$topurl` is now dead code, and I
+> have removed it. [[done]] --[[smcv]]
available to do consulting or other work on ikiwiki.
* [[Joey]] wrote ikiwiki. He is available for consulting on a part-time basis.
-* [[Amitai Schlair]] (a.k.a. [[schmonz]]) wrote [[rcs/cvs]],
+* [[Amitai Schleier]] (a.k.a. [[schmonz]]) wrote [[rcs/cvs]],
[[plugins/rsync]], and [[todo/fancypodcast]], among other things.
Contact him via [his website](http://www.schmonz.com/).
You could also drop by the IRC channel `#ikiwiki` on
[OFTC](http://www.oftc.net/) (`irc.oftc.net`).
+
+However, if you find a new security vulnerability, please email the maintainers
+privately instead of raising it in a public medium, so that we can
+arrange for coordinated disclosure when a fix is available. The maintainers
+are [[Joey Hess|joey]] (<joey@kitenet.net>),
+[[Simon McVittie|smcv]] (<smcv@debian.org>)
+and [[Amitai Schleier|schmonz]] (<schmonz-web-ikiwiki@schmonz.com>).
These templates are known to work with ikiwiki 2.31, and since I'll install always the newest one on my server I'll will update them on a regular basis.
* (This link appears to be broken?)
+----
+
+Perhaps suggesting people make a new page with their css code in it on ikiwiki.info/css market/ to link to would avoid dead links in the future?
--- /dev/null
+Added ```- 404``` under plugins in my .setup file. However, when I view it from the web it shows as disabled, and ikiwiki 404 doesn't work.
+
+Tried removing and adding back in and rebuilding each time but still the same issue.
+
+Any ideas?
+++ /dev/null
-Hello.
-
-Sorry to disturb here, but I'm struggling running ikiwiki under Arch on a RaspberryPi.
-I have a default user: alarm that both runs the nginx server and which created the ikiwiki site.
-Everything sits in the home folder.
-
-I've followed the instructions [here](http://ikiwiki.info/tips/dot_cgi/#index3h2) regarding the configuration of FastCGI, but there is a slight mistake in it I think.
-Nowadays, the ikiwiki.cgi sits in the subfolder that is the same as the wiki name under public_html/ and not directly under public_html/. But it does not really matter. I corrected that in my script.
-
-But somehow, even if I play around with the fastcgi parameters, I either get a 403, or the server is trying to send me the ikiwiki.cgi file to download, but does not run it.
-
-I've changed the permissions on the socket, I even tried to run the server as root, nothing changes. Still same errors. If anyone can help, I'd appreciate.
-
--- /dev/null
+Hello,
+I am writing a plugin that uses the timezone. Ikiwiki.pm [defines the default timezone](http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=IkiWiki.pm;h=fa71f479107a2388fde2fe00a67bfa2daa4fb3a9;hb=HEAD#l638) to ``:/etc/localtime``. The problem is that I do not know how to parse this.
+
+In my code, I have lines like ``$now = DateTime->now(time_zone => $config{timezone});`` or ``$thistime = DateTime->from_epoch(epoch=>$thistime, time_zone=>$config{timezone});``. They work well when timezone is something like ``Europe/Paris``, but with the default ``:/etc/localtime``, I get the error message ``The timezone ':/etc/localtime' is an invalid name.``
+
+Is there a way to automatically recognize both ``Europe/Paris`` and ``:/etc/localtime``? Or should I add something like the following in my code?
+
+ if ($config{timezone} eq ":/etc/localtime") {
+ $config{timezone} = DateTime::TimeZone->new(name=>'local')->name();
+ }
+
+Regards,
+[[Louis|spalax]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ avatar="http://cdn.libravatar.org/avatar/0ee943fe632ff995f6f0f25b7167d03b"
+ subject="comment 1"
+ date="2016-12-26T19:26:25Z"
+ content="""
+`:/etc/localtime` is a glibc'ism, added to solve
+[[bugs/without_timezone,_excessive_statting_causes_slowness]]. It means
+\"read the contents or symlink destination of `/etc/localtime` and use that as
+the active time zone\".
+
+I would not recommend parsing that string, although you could.
+
+ikiwiki sets the `TZ` environment variable to either `$config{timezone}`
+or that default value during startup; so can't your plugin just use
+local time unconditionally, via `time_zone => 'local'`, without ever
+caring about which specific time zone that means?
+
+(For example, the standard `IkiWiki::formattime` uses `localtime($time)` which
+is basically a non-OO version of
+`DateTime->from_epoch(epoch => $time, time_zone => 'local')`.)
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="spalax"
+ avatar="http://cdn.libravatar.org/avatar/3f1353e4135221fc25bfecd1b812bcc8"
+ subject="comment 2"
+ date="2016-12-26T22:03:27Z"
+ content="""
+> [...] can't your plugin just use local time unconditionally, via time_zone => 'local' [...]?
+
+Perfect! Thanks.
+
+-- [[Louis|spalax]]
+"""]]
--- /dev/null
+I host a static ikiwiki site with pages written in Norwegian. To the end user, not much English is visible except for "Last edited" on the bottom and RecentChanges on top of every page.
+
+I'd like ikiwiki to translate these terms (and preferably also localize the time and date displayed). What is the proper way to do this?
+
+I've looked at [[translation]], [[plugins/po]] and I still haven't got a clue. Translating the RecentChanges page itself is not a concern.
+
+Thanks in advance!
+
+PS: As much as I admire the software of Joey Hess, I'm amazed over and over of how hard it is for me to do certain easy things with it, and how it seems like no one else considers this to be a difficulty. It's like the software presupposed a type of thinking one acquires when coding? working a lot with git etc? I don't know. Is it only me? (Semivalid example: Using my ikiwiki setup-file and wiki pages to generate a site with all dynamic stuff enabled (meant as a read-only wiki) Wouldn't a lot of ikiwiki users like to do this? The example is semivalid because apparently [someone else](https://wiki.math.cmu.edu/iki/wiki/tips/20130930-ikiwiki/020-local-preview.html) found this intuitive too.)
--- /dev/null
+[[!comment format=mdwn
+ username="spalax"
+ subject="Translating "Last edited""
+ date="2016-09-27T19:08:29Z"
+ content="""
+Since the \"last edited\" text is hard coded (see line 209 of [the page template](http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=templates/page.tmpl;hb=72c3b81efb1079f8db070ac89e97e9b7bcedd61b#l209)), I think that the only way to translate it is to copy the page template into your website source, and translate the text in it.
+
+Concerning the localized time, I wonder if ensuring that your LANG environment variable is correctly set is sufficient…
+"""]]
--- /dev/null
+Ikiwiki version: ikiwiki version 3.20160905~bpo8+1 running on Debian Jessie.
+
+I have a page with directives something like the following:
+
+ \[[!edittemplate match="worklog/" template="templates/worklog.tmpl" silent="yes"]]
+ \[[!inline pages="worklog/*" rootpage=worklog postformtext="Record worklog entry for {{$title}}" archive="no" show="0"]]
+
+The template `worklog.tmpl` looks something like this:
+
+ Time: <TMPL_VAR TIME>
+ Title: <TMPL_VAR TITLE>
+ MTIME: <TMPL_VAR MTIME>
+
+The form to create a worklog renders, and I can click the button. I get the right template. But the rendered template looks something like
+
+ Time: 2016-09-18T04:57:54Z
+ Title:
+ MTIME:
+
+In other words: `<TMPL_VAR TIME>` works, and no other `TMPL_VAR` directives do (that I have been able to find). They all seem to be blank. I have been doing some source diving and it looks as if they ought to exist, but nothing is ever rendered.
+
+My long-term goal is to make some custom template variables, but at this point I cannot even get the builtins working, other than `time`.
+
+I am using a bunch of other plugins like `getfield` and `field`, but I do not know if any of them are the culprit. Here is the list from my `ikiwiki.setup` file:
+
+ add_plugins => [qw{goodstuff prettydate templatebody
+ template edittemplate httpauth date
+ attachment inline field getfield ymlfront
+ listdirectives sqlite_search sidebar}],
+
+
+What am I doing wrong?
--- /dev/null
+Hello.
+
+Sorry to disturb here, but I'm struggling to run ikiwiki under Arch on a RaspberryPi.
+I have a default user: alarm that both runs the nginx server and which created the ikiwiki site.
+Everything sits in the home folder.
+
+I've followed the instructions [here](http://ikiwiki.info/tips/dot_cgi/#index3h2) regarding the configuration of FastCGI, but there is a slight mistake in it I think.
+Nowadays, the ikiwiki.cgi sits in the subfolder that is the same as the wiki name under public_html/ and not directly under public_html/. But it does not really matter. I corrected that in my script.
+
+But somehow, even if I play around with the fastcgi parameters, I either get a 403, or the server is trying to send me the ikiwiki.cgi file to download, but does not run it.
+
+I've changed the permissions on the socket, I even tried to run the server as root, nothing changes. Still same errors. If anyone can help, I'd appreciate.
+
+-- Update
+**Pseudo solution** : Solved my problem by switching from Nginx to Apache. Somehow handling .cgi scripts with Apache is less troublesome.
+
+
--- /dev/null
+[[!comment format=mdwn
+ username="alexjj@97b75209148c043997fe05b4341a629090820035"
+ nickname="alexjj"
+ subject="ever fix this?"
+ date="2016-09-20T16:59:26Z"
+ content="""
+Did you find a fix? I've just installed ikiwiki and get the same error. nginx reports no errors and I'm not sure where to look to find out what is wrong. Is it file permissions on server or what?
+"""]]
--- /dev/null
+Hey everyone, I have a local "laptop" wiki which uses git as its version control system. I would like my wiki to be mirrored by my hoster (nearlyfreespeech) so I can browse and edit it on the go as well as have an offsite backup of it if my laptop should ever die. In the last three hours I figured out that:
+
+1. I need to install ikiwiki on my nearlyfreespeech site
+2. I need to create a remote for my local repository on my nfs site.
+3. I need to setup ikiwiki on nfs
+4. I need to mirror/sync continuously my local laptop repo with the main repo on my server (probably through the remote)
+
+So far I figured out parts of this plan. My status quo is the following:
+
+- laptop:
+ - srcdir: ~/wiki
+ - destdir: ~/sites/wiki (this shouldn't matter since it points to my local webserver setup)
+ - repository: ~/wiki.git
+- nfs:
+ - srcdir: /home/private/wiki
+ - destdir: /home/public
+ - repository: /home/private/2wiki.git
+ - remote repository: /home/private/wiki.git (configured as a remote named "nfswiki" on my laptop)
+
+On my laptop I can now go into ~/wiki, edit some files and afterwards can invoke ikiwiki --setup ~/wiki.setup which will generate a local version of my site for me. If I want to update my server copy, I can go into ~/wiki do git add ., git commit -m "Update", git push nfswiki master (which I hope is the correct way of doing things???). Afterwards I should have a (bare) repo on my nfs server with the same contents as my local (bare) repo, since I setup my remote with my local (bare) repo, which gets updated whenever I update my working copy (= srcdir).
+
+On my server I have installed ikiwiki more or less as described [here](https://ikiwiki.info/tips/nearlyfreespeech/). I setup this wiki by using the auto.setup method. It works. I basically have an empty wiki waiting to be filled.
+
+But how do I now create the plumbing necessary to let me…
+
+- connect the remote of my laptop repo to the nfs repo?
+- connect the nfs repo to the laptop repo through my remote?
+- edit either wiki (local/nfs) and the changes get synced to both wikis?
+
+Here my sparse understanding (which is still a generous way to put it) of git is simply not enough.
+
+Pages I have checked out:
+
+- [nearlyfreespeech](https://ikiwiki.info/tips/nearlyfreespeech/)
+- [distributed wikis](https://ikiwiki.info/tips/distributed_wikis/)
+- [laptop wiki with git](https://ikiwiki.info/tips/laptop_wiki_with_git/)
+- [byhand](https://ikiwiki.info/setup/byhand/)
+- [setup](https://ikiwiki.info/setup/)
+
+I'm very very thankful for any suggestions, since I have myself committed to solve any problems (and at least kinda understand what is involved here) to make this work at least to a degree that I can replicate the results in similar situations.
+
+Thanks for reading and for any tips that you can offer towards making me understand this admittedly complicated and involved question.
--- /dev/null
+[[!comment format=mdwn
+ username="openmedi"
+ subject="comment 1"
+ date="2016-11-03T22:13:14Z"
+ content="""
+I was able to make some progress with this by taking a somewhat different approach. First of all I understand now a little better, that the above setup could be made to work, but it is easier to setup a wiki on my server by hand and then setting up the server repository as a remote of my local repository. After merging/combining the two repos (e.g.: pulling from the server with `--allow-unrelated-histories` and then pushing the local repo to the server) I had a state in which I could let ikiwiki run on my server clone of the local wiki.
+
+There's still work to do. I run into problems with exceeding the cpu time limit when trying to do a full rebuild which, because of the complexity of the wiki takes a while. Is there a way to do cumulative rebuilds or something like that?
+
+I also will have to see how well plugins will work on nearly free speech.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="openmedi"
+ subject="comment 2"
+ date="2016-11-06T19:36:24Z"
+ content="""
+Another update on this, because it's related to running ikiwiki on nearly free speech: ikiwiki, xapian (and omega; for search), pandoc and pandoc-citeproc and I believe all perl modules one would need to run all of this are installed system wide on the white beta realm. So getting an ikiwiki up and running is nowadays a piece of cake and consists of calling `ikiwiki --setup /usr/local/etc/ikiwiki/auto.setup`, filling in the correct infos and afterwards customizing the created `.setup` file further. That's already it. Plugins can be installed to `~/.ikiwiki/IkiWiki/Plugin`. By setting a `templatedir` in the `.setup` you can have a templatedir in your user's dir.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="openmedi"
+ subject="comment 3"
+ date="2016-11-10T17:09:41Z"
+ content="""
+[See also my problem with doing a complete rebuild with a time limit…](http://ikiwiki.info/forum/large_wiki:_is_running_ikiwiki_in_steps_possible__63__/)
+"""]]
--- /dev/null
+Because of the complexity and size of my wiki, I am unable to let ikiwiki run its course on my shared hosting provider of choice. The maximum time allowed for a rebuild as is dictated by the maximum amount of cpu time I can assign myself to is 10 minutes. What I wanted to know is: Is there a way to run ikiwiki in chunks of 10-minute steps or something like that?
--- /dev/null
+[[!comment format=mdwn
+ username="openmedi"
+ subject="comment 1"
+ date="2016-11-10T17:06:23Z"
+ content="""
+[This is connected to my \"laptop wiki with git - but the other way around\" question.](http://ikiwiki.info/forum/laptop_wiki_with_git_-_but_the_other_way_around__63__/)
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="smcv"
+ avatar="http://cdn.libravatar.org/avatar/0ee943fe632ff995f6f0f25b7167d03b"
+ subject="no, not supported"
+ date="2016-12-19T17:23:06Z"
+ content="""
+> What I wanted to know is: Is there a way to run ikiwiki in chunks of 10 minutes
+> steps or something like that?
+
+No, this is not supported. I don't think ikiwiki is suitable for use with your wiki
+on that hosting provider.
+
+If you don't need the CGI, you could \"compile\" the wiki offline (on your laptop)
+and upload the resulting `$destdir` to the hosting provider - effectively
+treating the hosting provider as simple static storage.
+"""]]
* [[chrysn]] `git://prometheus.amsuess.com/ikiwiki`
* [[simonraven]] (unavailable) `git://github.com/kjikaqawej/ikiwiki-simon.git`
* [[schmonz]] `git://github.com/schmonz/ikiwiki.git`
-* [[will]] `http://www.cse.unsw.edu.au/~willu/ikiwiki.git`
+* [[will]] (unavailable) `http://www.cse.unsw.edu.au/~willu/ikiwiki.git`
* [[kaizer]] `git://github.com/engla/ikiwiki.git`
* [[bbb]] (unavailable) `http://git.boulgour.com/bbb/ikiwiki.git`
* [[KathrynAndersen]] `git://github.com/rubykat/ikiplugins.git`
* [[ktf]] `git://github.com/ktf/ikiwiki.git`
* [[tove]] `git://github.com/tove/ikiwiki.git`
-* [[GiuseppeBilotta]] `git://git.oblomov.eu/ikiwiki`
+* [[GiuseppeBilotta]] (unavailable) `git://git.oblomov.eu/ikiwiki`
* [[roktas]] (unavailable) `git://github.com/roktas/ikiwiki.git`
* [[davrieb|David_Riebenbauer]] (unavailable) `git://git.liegesta.at/git/ikiwiki`
([browse](http://git.liegesta.at/?p=ikiwiki.git;a=summary))
* anderbubble `git://civilfritz.net/ikiwiki.git`
* frioux `git://github.com/frioux/ikiwiki`
* llipavsky `git://github.com/llipavsky/ikiwiki`
-* [[cbaines]] `git://git.cbaines.net/ikiwiki`
+* [[cbaines]] (unavailable) `git://git.cbaines.net/ikiwiki`
* [[mhameed]] `git://github.com/mhameed/ikiwiki.git`
* [[spalax]] `git://github.com/paternal/ikiwiki.git` ([[browse|https://github.com/paternal/ikiwiki]])
* [[jcflack]] `git://github.com/jcflack/ikiwiki.git`
\[[!progress totalpages="* and !*/Discussion" donepages="*/Discussion"]]
+Rendering:
+[[!progress totalpages="* and !*/Discussion" donepages="*/Discussion"]]
+
[[!meta robots="noindex, follow"]]
* [KheOps's blog](https://w.ceops.eu/words/)
* [Stig Sandbeck Mathisen](http://fnord.no/) - Personal site and blog, with a bootstrap theme, and varnish frontend.
* Kalle Söderman: [Seen Architecture](http://img.kalleswork.net), [Stockholm Project](http://stockholm.kalleswork.net) - Mainly image galleries using the album and osm plugins with a customized html5 theme.
-* [James Technotes](http://jamestechnotes.com), my [wiki](http://jamestechnotes.com) and [blog](http://jamestechnotes.com/blog).
-* [Amitai Schlair's site](http://www.schmonz.com/)
+* James Richardson's [wiki](https://jamestechnotes.com), [blog](https://jamesrichardson.name), and online [resume](https://resume.jamesrichardson.name).
+* [Amitai Schleier's site](https://schmonz.com/)
* My ([[spalax]]) [professional website](http://paternault.fr)
* [Aloodo Blog](http://blog.aloodo.org/)
* Ninguém tem blog! - Restricted ikiwiki hosting
* [Sean Whitton's personal website](http://spwhitton.name/)
* [Matto's personal website](https://box.matto.nl)
* [Rob Sayers' personal website](http://www.robsayers.com)
+* [Svetlana Tkachenko's personal website](http://svetlana.nfshost.com) - personal site, no blog
+++ /dev/null
-News for ikiwiki 3.20150610:
-
- The new [[plugins/emailauth]] plugin allows users to authenticate using an email
- address, without otherwise creating an account.
- The [[plugins/openid]] plugin now enables emailauth by default. Please include
- emailauth in the `disable_plugins` setting if this is not desired.
- Conversely, if emailauth is required on a wiki that does not enable
- openid, you can list it in the `enable_plugins` setting.
-
-ikiwiki 3.20150610 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * [ [[Joey Hess|joey]] ]
- * New [[plugins/emailauth]] plugin lets users log in, without any registration,
- by simply clicking on a link in an email.
- * Re-remove google from [[plugins/openid]] selector; their openid provider is
- gone for good.
- * Make the openid selector display "Password" instead of "Other"
- when appropriate, so users are more likely to click on it when
- they don't have an openid.
- * Converted openid-selector into a more generic loginselector helper
- plugin.
- * passwordauth: Don't allow registering accounts that look like openids.
- * Make cgiurl output deterministic, not hash order. Closes: #[785738](https://bugs.debian.org/785738)
- Thanks, Daniel Kahn Gillmor
- * [ [[Simon McVittie|smcv]] ]
- * Do not enable emailauth by default, to avoid surprises on httpauth-only
- sites. Enable it by default in openid instead, since it is essentially
- a replacement for OpenIDs.
- * Make the attachment plugin work with CGI.pm 4.x (Closes: #[786586](https://bugs.debian.org/786586);
- workaround for #[786587](https://bugs.debian.org/786587) in libcgi-pm-perl)
- * Add a public-domain email icon from tango-icon-theme
- * Populate pagectime from either mtime or inode change time,
- whichever is older, again for more reproducible builds
- * debian: build the docwiki with LC\_ALL=C.UTF-8 and TZ=UTC
- * debian/copyright: consolidate permissive licenses
- * debian/copyright: turn comments on provenance into Comment
- * [[plugins/brokenlinks]]: sort the pages that link to the missing page, for
- better reproducibility
- * Add \[[!meta date]] to news items and tips, since the git checkout
- and build process can leave the checkout date in the tarball
- release, leading to unstable sorting
- * Sort backlinks deterministically, by falling back to sorting by href
- if the link text is identical
- * Add a `$config{deterministic}` option and use it for the docwiki
- * [[plugins/haiku]]: if deterministic build is requested, return a hard-coded haiku
- * [[plugins/polygen]]: if deterministic build is requested, use a well-known random seed"""]]
+++ /dev/null
-ikiwiki 3.20150614 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * inline: change default sort order from age to "age title" for
- determinism, partially fixing deterministic build for git-annex,
- ikiwiki-hosting etc. (Closes: #[785757](http://bugs.debian.org/785757))
- * img: avoid ImageMagick misinterpreting filenames containing a colon
- * img test: set old timestamp on source file that will change, so that
- the test will pass even if it takes less than 1 second"""]]
\ No newline at end of file
+++ /dev/null
-ikiwiki 3.20160121 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * [ [[Amitai Schlair|schmonz]] ]
- * [[plugins/meta]]: Fix `\[[!meta name=foo]]` by closing the open quote.
- * Avoid unescaped `{` in regular expressions
- * meta test: Add tests for many behaviors of the directive.
- * img test: Bail gracefully when [[!cpan ImageMagick]] is not present.
- * [ [[Joey Hess|joey]] ]
- * [[plugins/emailauth]]: Added `emailauth_sender` config.
- * Modified `page.tmpl` to to set html `lang=` and `dir=` when
- values have been specified for them, which the [[plugins/po|po plugin]] does.
- * Specifically license the javascript underlay under the permissive
- basewiki license.
- * [ [[Simon McVittie|smcv]] ]
- * [[plugins/git]]: if no committer identity is known, set it to
- `IkiWiki <ikiwiki.info>` in `.git/config`. This resolves commit errors
- in versions of git that require a non-trivial committer identity.
- * [[plugins/inline]], [[plugins/trail]]: rename `show`, `feedshow` parameters to `limit`, `feedlimit`
- (with backwards compatibility)
- * [[plugins/pagestats]]: add `show` option to show [[plugins/meta]] fields. Thanks, [[Louis|spalax]]
- * [[plugins/inline]]: force RSS `<comments>` to be a fully absolute URL as required
- by the W3C validator. Please use Atom feeds if relative URLs are
- desirable on your site.
- * [[plugins/inline]]: add `<atom:link rel="self">` to RSS feeds as recommended by
- the W3C validator
- * [[plugins/inline]]: do not produce links containing `/./` or `/../`
- * syslog: accept and encode UTF-8 messages
- * syslog: don't fail to log if the wiki name contains `%s`
- * Change dependencies from transitional package [[!debpkg perlmagick]]
- to [[!debpkg libimage-magick-perl]] (Closes: #[789221](http://bugs.debian.org/789221))
- * debian/copyright: update for the rename of `openid-selector` to
- `login-selector`
- * d/control: remove leading article from Description
- (lintian: description-synopsis-starts-with-article)
- * d/control: Standards-Version: 3.9.6, no changes required
- * Wrap and sort control files (`wrap-and-sort -abst`)
- * Silence "used only once: possible typo" warnings for variables
- that are part of modules' APIs
- * Run [[!debpkg autopkgtest]] tests using [[!debpkg autodep8]] and the pkg-perl team's
- infrastructure
- * Add enough build-dependencies to run all tests, except for
- non-git VCSs
- * tests: consistently use `done_testing` instead of `no_plan`
- * `t/img.t`: do not spuriously skip
- * img test: skip testing PDFs if unsupported
- * img test: use the right filenames when testing that deletion occurs"""]]
--- /dev/null
+ikiwiki 3.20160905 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * [ Joey Hess ]
+ * Fix installation when prefix includes a string metacharacter.
+ Thanks, Sam Hathaway.
+ * [ Simon McVittie ]
+ * Use git log --no-renames to generate recentchanges, fixing the git
+ test-case with git 2.9 (Closes: #[835612](http://bugs.debian.org/835612))"""]]
\ No newline at end of file
--- /dev/null
+ikiwiki 3.20161219 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * [ Joey Hess ]
+ * inline: Prevent creating a file named ".mdwn" when the
+ postform is submitted with an empty title.
+ * [ Simon McVittie ]
+ * Security: tell `git revert` not to follow renames. If it does, then
+ renaming a file can result in a revert writing outside the wiki srcdir
+ or altering a file that the reverting user should not be able to alter,
+ an authorization bypass. Thanks, intrigeri. ([[!cve CVE-2016-10026]])
+ * cgitemplate: remove some dead code. Thanks, blipvert
+ * Restrict CSS matches against header class to not break
+ Pandoc tables with header rows. Thanks, karsk
+ * Make pagestats output more deterministic. Thanks, intrigeri"""]]
--- /dev/null
+ikiwiki 3.20161229 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Security: force CGI::FormBuilder->field to scalar context where
+ necessary, avoiding unintended function argument injection
+ analogous to [[!cve CVE-2014-1572]]. In ikiwiki this could be used to
+ forge commit metadata, but thankfully nothing more serious.
+ ([[!cve CVE-2016-9646]])
+ * Security: try revert operations in a temporary working tree before
+ approving them. Previously, automatic rename detection could result in
+ a revert writing outside the wiki srcdir or altering a file that the
+ reverting user should not be able to alter, an authorization bypass.
+ ([[!cve CVE-2016-10026]] represents the original vulnerability.)
+ The incomplete fix released in 3.20161219 was not effective for git
+ versions prior to 2.8.0rc0.
+ ([[!cve CVE-2016-9645]] represents that incomplete solution.)
+ * Add CVE references for CVE-2016-10026
+ * Add automated test for using the CGI with git, including
+ CVE-2016-10026
+ - Build-depend on libipc-run-perl for better build-time test coverage
+ * Add missing ikiwiki.setup for the manual test for CVE-2016-10026
+ * git: don't issue a warning if the rcsinfo CGI parameter is undefined
+ * git: do not fail to commit changes with a recent git version
+ and an anonymous committer"""]]
I'm trying to add a way to query the data saved by the OpenID plugin from outside of ikiwiki, to see what identity the user has been authenticated as, if any. I'm thinking of designating some directories as internal pages and check the identity against a list in a mod_perl access hook. I would also write a CGI script that would return a JSON formatted reply to tell if the user is authenticated for those pages and query it with AJAX and only render links to the internal pages if the user would have access to them. That's just a couple of ideas I'm working on first, but I can imagine that there's any number of other tricks that people could implement with that sort of a thing.
Also, this isn't really specific to OpenID but to all auth plugins, but I'm going to use only OpenID for authentication so that's what I'm targeting right now. I suppose that would be worth its own TODO item. --[[kaol]]
+
+----
+
+So OpenID is dying, but OpenID connect is actually out there: Google is using it now, and probably other providers. There is an interesting hybrid of OpenID and email auth called [Portier](https://portier.github.io) that is a successor to Persona that may be interesting here... The main problem here is that the broker is written in Rust and I am not sure we want to depend on such a thing in Ikiwiki. Still, the protocol could be used as a basis here... --[[anarcat]]
Καλημέρα!
test
+
+I must **emphasise** this.
-Let's do an ikiwiki security analysis.
-
If you are using ikiwiki to render pages that only you can edit, do not
generate any wrappers, and do not use the cgi, then there are no more
security issues with this program than with cat(1). If, however, you let
others edit pages in your wiki, then some possible security issues do need
to be kept in mind.
+If you find a new security vulnerability, please email the maintainers
+privately instead of listing it in a public bug tracker, so that we can
+arrange for coordinated disclosure when a fix is available. The maintainers
+are [[Joey Hess|joey]] (<joey@kitenet.net>),
+[[Simon McVittie|smcv]] (<smcv@debian.org>)
+and [[Amitai Schleier|schmonz]] (<schmonz-web-ikiwiki@schmonz.com>).
+
[[!toc levels=2]]
----
for sites where an untrusted user is able to attach files with arbitrary
names and/or run a setuid ikiwiki wrapper with a working directory of
their choice.
+
+## Editing restriction bypass for git revert
+
+intrigeri discovered that a web or git user could revert a change to a
+page they are not allowed to edit, if the change being reverted was made
+before the page was moved from a location where that user had permission
+to edit it. For example, if a file is moved from `drafts/policy.mdwn`
+(editable by less-trusted users) to `policy.mdwn` (only editable
+by more-trusted users), a less-trusted user could revert a change
+that was made to `drafts/policy.mdwn` prior to that move, and it would
+result in `policy.mdwn` being altered.
+
+This affects sites with the `git` VCS and the `recentchanges` plugin,
+which are both used in most ikiwiki installations.
+
+This bug was reported on 2016-12-17. A partially fixed version
+3.20161219 was released on 2016-12-19, but the solution used in that
+version was not effective with git versions older than 2.8.0.
+
+([[!cve CVE-2016-10026]] represents the original vulnerability.
+[[!cve CVE-2016-9645]]/OVE-20161226-0002 represents the vulnerability
+in 3.20161219 caused by the incomplete fix.)
+
+## Commit metadata forgery via CGI::FormBuilder context-dependent APIs
+
+When CGI::FormBuilder->field("foo") is called in list context (and
+in particular in the arguments to a subroutine that takes named
+arguments), it can return zero or more values for foo from the CGI
+request, rather than the expected single value. This breaks the usual
+Perl parsing convention for named arguments, similar to CVE-2014-1572
+in Bugzilla (which was caused by a similar API design issue in CGI.pm).
+
+In ikiwiki, this appears to have been exploitable in two places, both
+of them relatively minor:
+
+* in the comments plugin, an attacker who was able to post a comment
+ could give it a user-specified author and author-URL even if the wiki
+ configuration did not allow for that, by crafting multiple values
+ for other fields
+* in the editpage plugin, an attacker who was able to edit a page
+ could potentially forge commit authorship (attribute their edit to
+ someone else) by crafting multiple values for the rcsinfo field
+
+([[!cve CVE-2016-9646]]/OVE-20161226-0001)
* [[!shortcut name=iki url="http://ikiwiki.info/%S/"]]
* [[!shortcut name=ljuser url="http://%s.livejournal.com/"]]
* [[!shortcut name=rfc url="https://www.ietf.org/rfc/rfc%s.txt" desc="RFC %s"]]
-* [[!shortcut name=c2 url="http://c2.com/cgi/wiki?%s"]]
+* [[!shortcut name=c2 url="http://wiki.c2.com/?%s"]]
* [[!shortcut name=meatballwiki url="http://www.usemod.com/cgi-bin/mb.pl?%s"]]
* [[!shortcut name=emacswiki url="http://www.emacswiki.org/cgi-bin/wiki/%s"]]
* [[!shortcut name=haskellwiki url="http://haskell.org/haskellwiki/%s"]]
display: block;
}
-.header {
+div.header, header.header {
margin: 0;
font-size: 140%;
font-weight: bold;
* Mick Pollard
* Nico Schottelius
* Jon Dowland
-* Amitai Schlair
+* Amitai Schleier
* Luca Capello
* Patrick ZAJDA
* Fergus Cameron
[[!meta date="2015-01-07 23:11:57 -0500"]]
-[[Amitai Schlair|schmonz]] wrote an `ikiwiki-import` program that
+[[Amitai Schleier|schmonz]] wrote an `ikiwiki-import` program that
currently can only [import from
Textpattern](https://github.com/schmonz/txp2ikiwiki), and then only
if configured exactly like the site he imported. But `ikiwiki-import`
Also, note that the `/tmp/fcgi.socket` file needs to be writable by the webserver. I am also unsure as to the security of this setup, as I am using this only on my dev server. Needless to say that [[real fastcgi support|todo/fastcgi_or_modperl_installation_instructions]] would be great. ;) --[[anarcat]]
+2016-09-23 [[alexjj]]: On Debian Jessie creating the fcgi ikiwiki.service is not necessary. Install fcgiwrap, it'll auto start the fcgiwrap systemd service and then add the parameters in the virtual host:
+
+ location / {
+ try_files $uri $uri/ =404;
+ }
+
+ # Max size of file upload
+ client_max_body_size 10m;
+
+ location ~ .cgi {
+ #gzip off is recommended in /usr/share/doc/fcgiwrap/README.Debian
+ gzip off;
+ fastcgi_pass unix:/var/run/fcgiwrap.socket;
+ include /etc/nginx/fastcgi_params;
+ }
+
+Here's a [[complete site.conf|tips/nginx]] for nginx if that helps.
+
## boa
Edit /etc/boa/boa.conf and make sure the following line is not commented:
--- /dev/null
+There's a lot of scattered info about nginx. This is what I've deduced from reading various blogs, Debian READMEs and the [nginx wiki](https://www.nginx.com/resources/wiki/start/topics/examples/fcgiwrap/).
+
+For Debian I suggest installing nginx from [dotdeb](https://www.dotdeb.org/instructions/). They provide the latest stable versions.
+
+For cgi install ```fcgiwrap```
+
+Here is a full sites-enabled/example.com configure for hosting ikiwiki on the root domain, example.com:
+
+```
+server {
+ listen 443 default_server;
+ listen [::]:443 ssl default_server;
+ root /home/ikiwiki/public_html/wiki;
+
+ index index.html;
+
+ server_name example.com www.example.com;
+
+ ssl_certificate /etc/nginx/ssl/example.com.pem;
+ ssl_certificate_key /etc/nginx/ssl/example.com.key;
+
+ ssl_session_timeout 5m;
+ ssl_session_cache shared:SSL:50m;
+ ssl_dhparam /etc/nginx/ssl/dhparam.pem;
+ ssl_prefer_server_ciphers on;
+ ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
+ ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
+ resolver 8.8.8.8;
+ ssl_stapling on;
+ ssl_trusted_certificate /etc/nginx/ssl/example.com.pem;
+ add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;";
+
+ client_max_body_size 10m;
+
+ # ikiwiki site
+
+ location / {
+ try_files $uri $uri/ =404;
+ }
+
+ location ~ .cgi {
+ gzip off;
+ fastcgi_pass unix:/var/run/fcgiwrap.socket;
+ include /etc/nginx/fastcgi_params;
+
+ }
+}
+
+
+##
+#Forward http to https
+##
+
+server {
+ listen 80 default_server;
+ listen [::]:80 default_server;
+ server_name example.com www.example.com;
+ return 301 https://$host$request_uri;
+}
+```
+
+For SSL tips this [gist](https://gist.github.com/plentz/6737338) is a good source of information. Use [letsencrypt](https://letsencrypt.org/) to get free certificates.
BrowserID, or Mozilla Persona, is shutting down on the 30th of November 2016.
Seen at <https://login.persona.org/about> --[[leg]]
+
+The successor to Persona seems to be [Portier](https://portier.github.io), which is based on OpenID connect. --[[anarcat]]
>>> --[[smcv]]
>>> Email addresses are now cloaked in commits, using foaf:mbox_sha1sum. --[[Joey]]
+
+Note that the implementation of this lives in [[plugins/emailauth]].
+
+Also, I have found a similar system called [Portier](https://portier.github.io) that enables email-based auth but enhances it with [[plugins/openid]] connect... Maybe ikiwiki's authentication system could follow the standards set by Portier? OpenID connect discovery is particularly interesting, as it could mean that using your GMail address to login to ikiwiki would mean that you go straight to the more secure OpenID / Oauth authentication instead of relying on the slow "send email and click link" system... --[[anarcat]]
--- /dev/null
+The size parameter should accept relative values, like "100%". When including large images, I would like it to be scaled relative to the available space.
+
+> 100% of what?
+>
+> The purpose of `[[!img]]` is to scale large images, for example photos, down
+> to a more web-suitable size. When ikiwiki rebuilds the website, it cannot
+> know how large visitors' web browser windows are going to be, so it cannot
+> scale the image relative to the size of a visitor's web browser window.
+>
+> The closest thing it could do would be to not scale the image at all
+> (potentially a very large download if it's a high-resolution photo),
+> and use CSS or `<img sizes=...>` to ask the visitor's web browser to scale
+> the image relative to something the web browser knows, such as the viewport
+> size.
+>
+> With HTML5 `<img sizes="..." srcset="...">`, it would be possible to extend
+> `[[!img]]` to produce more than one resized image and let the visitor's
+> browser choose which one to download, but I'm not sure what a good syntax
+> for that would look like...
+>
+> "The available space" is not something we can use, because current HTML
+> standards do not offer that. In HTML5 it is possible to base sizes on the
+> viewport (window) size, but the available space (excluding sidebars etc.)
+> is not something the browser can know in advance, because it needs to know
+> how large images are before it carries out layout calculations, and it
+> needs to carry out layout calculations before it can know the available
+> space.
+>
+> --[[smcv]]
>> [See for yourself](https://notabug.org/iikb/ikiwiki-theme-bootstrap/commit/7f30630b6255336a34b14f70f2a674e15cd797a0) - don't mind the red parts.
>> This is tedious and boring, it's easier to tamper with template files
>> than to rewrite bootstrap by copying and pasting it. --[[desci]]
+
+> Is there any progress here? Should someone wanting to build a Bootstrap 4
+> theme look at working with this branch, or at a custom theme?
+>
+> For the record, there is a Debian package for
+> [font-awesome][]. [mkdocs-bootstrap][] uses
+> that. [sphinx-bootstrap-theme][] is another bootstrap-based theme
+> packaged in Debian. Both ship embedded copies of Bootstrap 3, so
+> there are prior offenses to just shipping the code within the
+> package.
+>
+> It would be preferable to package bootstrap 4 separately of
+> course... I made a [RFP for packaging B4](http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=842828).
+>
+> I was somehow under the impression that Bootstrap 4 was lighter, but
+> looking at the actual code on the alpha site makes me think that it
+> is actually larger, which reduces the incentives for me to do the
+> upgrade... Along with jquery, it's a 100KB overhead on first load,
+> something that shouldn't be neglected. The [alpha site][] is around
+> 1MB and 25 requests! My site can currently squeeze all of jquery and
+> bootstrap in 80KB (including the glyphs font) and it's only that
+> stupid Mozilla Fira font that makes it blow up to 300KB... So I am
+> not sure I would switch to B4 - maybe doing a B3 merge would be best
+> for now, especially since Bootstrap 3 is already packaged in Debian?
+> -- [[anarcat]]
+
+[alpha site]: https://v4-alpha.getbootstrap.com
+[bug #704330]: https://bugs.debian.org/704330
+[orphaned]: https://tracker.debian.org/pkg/twitter-bootstrap
+[sphinx-bootstrap-theme]: https://tracker.debian.org/pkg/sphinx-bootstrap-theme
+[mkdocs-bootstrap]: https://tracker.debian.org/pkg/mkdocs-bootstrap
+[font-awesome]: https://tracker.debian.org/pkg/fonts-font-awesome
-----
Rather than coding plugins for the Perl ikiwiki in Haskell, I wonder how easily a Haskell ikiwiki could still support plugins written in Perl? The (old and apparently stale) [HsPerl5](http://hackage.haskell.org/package/HsPerl5) package might provide a helpful starting point there. -- [[JoshTriplett]]
+
+-----
+
+I'm very keen on this, and would be interested in helping. I've been wanting to use ikiwiki for years, but the idea of investing time in the perl ecosystem and perl-based implementation stops me. -- [[Simon Michael]]
+++ /dev/null
-[[!meta redir=users/schmonz]]
--- /dev/null
+[[!meta redir=users/schmonz]]
--- /dev/null
+I'm using two Ikiwiki config files for a private journal and [public blog](https://inert.io), serving them with [Hiawatha](http://hiawatha-webserver.org/)
-Simon Michael (<simon@joyful.com>, sm on freenode) is a free software developer and consultant.
-His site is at <http://joyful.com>.
-He is the lead developer of the [Zwiki](http://zwiki.org) zope-based wiki engine, and also an ikiwiki fan.
+I'm Simon Michael (<simon@joyful.com>, sm on freenode, <http://joyful.com>), a free software developer and consultant.
-Favourite ikiwiki features: efficient/robust static html and rcs integration.
-Least favourite ikiwiki features: unstable hierarchical urls and setup complexity.
-He is [interested](http://ikiwiki.info/rcs/details/#index3h2) in getting a
-robust [darcs](http://ikiwiki.info/todo/darcs/) back end working.
+I developed the [Zwiki](http://zwiki.org) wiki engine (and more recently [hledger](http://hledger.org)), and am an ikiwiki fan.
+I'd like to see [[todo/rewrite ikiwiki in haskell]] happen.
-<http://github.com/blipvert>
+<https://github.com/blipvert>
-[Amitai Schlair](http://www.schmonz.com/) has contributed code to ikiwiki...
+[Amitai Schleier](https://schmonz.com/) has contributed code to ikiwiki...
[[!map
pages="!*/Discussion and ((link(users/schmonz) and plugins/* and !plugins/openid/*) or rcs/cvs or todo/fancypodcast)"
## Public
-* [My personal web site](http://www.schmonz.com/)
+* [My personal web site](https://schmonz.com/)
* [A very small podcast](https://agilein3minut.es/)
* [A major open-source project's wiki](http://wiki.netbsd.org) (with
the [[rcs/cvs]] plugin)
--- /dev/null
+#!/usr/bin/perl
+# Integration test for ikiwiki's git + CGI interaction: builds a small
+# git-backed wiki, commits via git and via the CGI wrapper, then checks
+# that web-based reverts respect page-edit permissions (CVE-2016-10026).
+use warnings;
+use strict;
+
+use Test::More;
+
+BEGIN {
+ # Skip the entire test file when any hard prerequisite is missing:
+ # a usable git binary, the CGI module, and IPC::Run for driving the
+ # compiled CGI wrapper.
+ my $git = `which git`;
+ chomp $git;
+ plan(skip_all => 'git not available') unless -x $git;
+
+ plan(skip_all => "CGI not available")
+ unless eval q{
+ use CGI qw();
+ 1;
+ };
+
+ plan(skip_all => "IPC::Run not available")
+ unless eval q{
+ use IPC::Run qw(run);
+ 1;
+ };
+
+ use_ok('IkiWiki');
+ use_ok('YAML::XS');
+}
+
+# We check for English error messages
+$ENV{LC_ALL} = 'C';
+
+use Cwd qw(getcwd);
+use Errno qw(ENOENT);
+
+# When INSTALLED_TESTS is set, exercise the system-installed ikiwiki;
+# otherwise build and run ikiwiki.out from the source tree, pointing it
+# at the in-tree underlays and templates.
+my $installed = $ENV{INSTALLED_TESTS};
+
+my @command;
+if ($installed) {
+ @command = qw(ikiwiki);
+}
+else {
+ ok(! system("make -s ikiwiki.out"));
+ @command = ("perl", "-I".getcwd."/blib/lib", './ikiwiki.out',
+ '--underlaydir='.getcwd.'/underlays/basewiki',
+ '--set', 'underlaydirbase='.getcwd.'/underlays',
+ '--templatedir='.getcwd.'/templates');
+}
+
+# write_old_file($name, $dir, $content)
+# Write a source file and backdate its mtime to a fixed epoch value so
+# that later writes are reliably detected as newer by the refresh logic.
+sub write_old_file {
+ my $name = shift;
+ my $dir = shift;
+ my $content = shift;
+ writefile($name, $dir, $content);
+ # 333333333 is an arbitrary timestamp far in the past (1980s).
+ ok(utime(333333333, 333333333, "$dir/$name"));
+}
+
+# Emit t/tmp/test.setup, a YAML setup file describing a git-backed wiki
+# with a CGI wrapper and a git post-commit wrapper. Pages under
+# writable/* are anonymously editable; everything else is locked.
+sub write_setup_file {
+ my %setup = (
+ wikiname => 'this is the name of my wiki',
+ srcdir => getcwd.'/t/tmp/in/doc',
+ destdir => getcwd.'/t/tmp/out',
+ url => 'http://example.com',
+ cgiurl => 'http://example.com/cgi-bin/ikiwiki.cgi',
+ cgi_wrapper => getcwd.'/t/tmp/ikiwiki.cgi',
+ # Distinct wrapper modes so check_cgi_mode_bits() can verify
+ # that each configured mode was actually applied.
+ cgi_wrappermode => '0751',
+ add_plugins => [qw(anonok attachment lockedit recentchanges)],
+ disable_plugins => [qw(emailauth openid passwordauth)],
+ anonok_pagespec => 'writable/*',
+ locked_pages => '!writable/*',
+ rcs => 'git',
+ git_wrapper => getcwd.'/t/tmp/in/.git/hooks/post-commit',
+ git_wrappermode => '0754',
+ # Empty origin branch: no pushes/pulls to a remote during tests.
+ gitorigin_branch => '',
+ );
+ unless ($installed) {
+ # The wrappers re-exec ikiwiki, which must find the in-tree libs.
+ $setup{ENV} = { 'PERL5LIB' => getcwd.'/blib/lib' };
+ }
+ writefile("test.setup", "t/tmp",
+ "# IkiWiki::Setup::Yaml - YAML formatted setup file\n" .
+ Dump(\%setup));
+}
+
+# Remove any stale wrappers, then rebuild the whole wiki and regenerate
+# the wrappers from scratch. ENOENT is tolerated on unlink because the
+# wrappers legitimately do not exist on the first run.
+sub thoroughly_rebuild {
+ ok(unlink("t/tmp/ikiwiki.cgi") || $!{ENOENT});
+ ok(unlink("t/tmp/in/.git/hooks/post-commit") || $!{ENOENT});
+ ok(! system(@command, qw(--setup t/tmp/test.setup --rebuild --wrappers)));
+}
+
+# Verify that both generated wrappers carry the permission bits that
+# were requested in the setup file (0751 for the CGI wrapper, 0754 for
+# the git post-commit hook). $mode is the third field of stat().
+sub check_cgi_mode_bits {
+ my $mode;
+
+ (undef, undef, $mode, undef, undef,
+ undef, undef, undef, undef, undef,
+ undef, undef, undef) = stat('t/tmp/ikiwiki.cgi');
+ # Mask off the file-type bits, keeping only permissions.
+ is ($mode & 07777, 0751);
+ (undef, undef, $mode, undef, undef,
+ undef, undef, undef, undef, undef,
+ undef, undef, undef) = stat('t/tmp/in/.git/hooks/post-commit');
+ is ($mode & 07777, 0754);
+}
+
+# run_cgi(%args)
+# Execute the compiled CGI wrapper with a synthesized CGI environment
+# and return its raw output (headers + body). Recognized args:
+#   method  => 'GET' (default) or 'POST'
+#   environ => hashref of extra environment variable overrides
+#   params  => hashref of CGI parameters
+sub run_cgi {
+ my (%args) = @_;
+ my ($in, $out);
+ my $method = $args{method} || 'GET';
+ my $environ = $args{environ} || {};
+ # NOTE(review): $params and its { do => 'prefs' } default appear
+ # unused below — CGI->new is given $args{params} directly, so the
+ # default never takes effect. Confirm whether this is intentional.
+ my $params = $args{params} || { do => 'prefs' };
+
+ my %defaults = (
+ SCRIPT_NAME => '/cgi-bin/ikiwiki.cgi',
+ HTTP_HOST => 'example.com',
+ );
+
+ # Serialize the parameters into a query string via CGI.pm.
+ my $cgi = CGI->new($args{params});
+ my $query_string = $cgi->query_string();
+
+ # NOTE(review): this comparison is case-sensitive; callers passing
+ # method => 'post' fall through to the GET branch, with the params
+ # delivered via QUERY_STRING instead of the request body.
+ if ($method eq 'POST') {
+ $defaults{REQUEST_METHOD} = 'POST';
+ $in = $query_string;
+ $defaults{CONTENT_LENGTH} = length $in;
+ } else {
+ $defaults{REQUEST_METHOD} = 'GET';
+ $defaults{QUERY_STRING} = $query_string;
+ }
+
+ # Caller-supplied environment overrides win over the defaults.
+ my %envvars = (
+ %defaults,
+ %$environ,
+ );
+ # Run the wrapper with the synthesized environment; $out captures
+ # everything the CGI writes to stdout.
+ run(["./t/tmp/ikiwiki.cgi"], \$in, \$out, init => sub {
+ map {
+ $ENV{$_} = $envvars{$_}
+ } keys(%envvars);
+ });
+
+ return $out;
+}
+
+# run_git(\@args, [$description])
+# Run git with @args inside the test srcdir (t/tmp/in), assert success,
+# and return git's stdout. The caller's file and line are woven into
+# the test description so failures point at the real call site.
+sub run_git {
+ my (undef, $filename, $line) = caller;
+ my $args = shift;
+ my $desc = shift || join(' ', 'git', @$args);
+ my ($in, $out);
+ ok(run(['git', @$args], \$in, \$out, init => sub {
+ chdir 't/tmp/in' or die $!;
+ # A fixed committer identity keeps commits reproducible.
+ $ENV{EMAIL} = 'nobody@ikiwiki-tests.invalid';
+ }), "$desc at $filename:$line");
+ return $out;
+}
+
+# Main test scenario. Builds a fresh wiki in t/tmp, makes three
+# revertable commits (two via git, one via the web CGI), then renames
+# pages out of the writable/ area and checks that reverting the earlier
+# commits — which would now touch locked pages — is refused, while a
+# revert that only touches writable pages succeeds (CVE-2016-10026).
+sub test {
+ my $content;
+ my $status;
+
+ # Start from a clean scratch directory.
+ ok(! system(qw(rm -rf t/tmp)));
+ ok(! system(qw(mkdir t/tmp)));
+
+ write_old_file('.gitignore', 't/tmp/in', "/doc/.ikiwiki/\n");
+ write_old_file('doc/writable/one.mdwn', 't/tmp/in', 'This is the first test page');
+ write_old_file('doc/writable/two.mdwn', 't/tmp/in', 'This is the second test page');
+ write_old_file('doc/writable/three.mdwn', 't/tmp/in', 'This is the third test page');
+ write_old_file('doc/writable/three.bin', 't/tmp/in', 'An attachment');
+
+ unless ($installed) {
+ # The recentchanges plugin needs these pages from the source tree.
+ ok(! system(qw(cp -pRL doc/wikiicons t/tmp/in/doc/)));
+ ok(! system(qw(cp -pRL doc/recentchanges.mdwn t/tmp/in/doc/)));
+ }
+
+ run_git(['init']);
+ run_git(['add', '.']);
+ run_git(['commit', '-m', 'Initial commit']);
+
+ write_setup_file();
+ thoroughly_rebuild();
+ check_cgi_mode_bits();
+
+ ok(-e 't/tmp/out/writable/one/index.html');
+ $content = readfile('t/tmp/out/writable/one/index.html');
+ like($content, qr{This is the first test page});
+ my $orig_sha1 = run_git(['rev-list', '--max-count=1', 'HEAD']);
+
+ # Test the git hook, which accepts git commits
+ writefile('doc/writable/one.mdwn', 't/tmp/in',
+ 'This is new content for the first test page');
+ run_git(['add', '.']);
+ run_git(['commit', '-m', 'Git commit']);
+ my $first_revertable_sha1 = run_git(['rev-list', '--max-count=1', 'HEAD']);
+ isnt($orig_sha1, $first_revertable_sha1);
+
+ # The post-commit hook should have re-rendered the page.
+ ok(-e 't/tmp/out/writable/one/index.html');
+ $content = readfile('t/tmp/out/writable/one/index.html');
+ like($content, qr{This is new content for the first test page});
+
+ # Test a web commit
+ $content = run_cgi(method => 'POST',
+ params => {
+ do => 'edit',
+ page => 'writable/two',
+ type => 'mdwn',
+ editmessage => 'Web commit',
+ editcontent => 'Here is new content for the second page',
+ _submit => 'Save Page',
+ _submitted => '1',
+ },
+ );
+ # A successful edit redirects back to the saved page.
+ like($content, qr{^Status:\s*302\s}m);
+ like($content, qr{^Location:\s*http://example\.com/writable/two/\?updated}m);
+ my $second_revertable_sha1 = run_git(['rev-list', '--max-count=1', 'HEAD']);
+ isnt($orig_sha1, $second_revertable_sha1);
+ isnt($first_revertable_sha1, $second_revertable_sha1);
+
+ ok(-e 't/tmp/out/writable/two/index.html');
+ $content = readfile('t/tmp/out/writable/two/index.html');
+ like($content, qr{Here is new content for the second page});
+
+ # Another edit
+ writefile('doc/writable/three.mdwn', 't/tmp/in',
+ 'Also new content for the third page');
+ unlink('t/tmp/in/doc/writable/three.bin');
+ writefile('doc/writable/three.bin', 't/tmp/in',
+ 'Changed attachment');
+ run_git(['add', '.']);
+ run_git(['commit', '-m', 'Git commit']);
+ ok(-e 't/tmp/out/writable/three/index.html');
+ $content = readfile('t/tmp/out/writable/three/index.html');
+ like($content, qr{Also new content for the third page});
+ $content = readfile('t/tmp/out/writable/three.bin');
+ like($content, qr{Changed attachment});
+ my $third_revertable_sha1 = run_git(['rev-list', '--max-count=1', 'HEAD']);
+ isnt($orig_sha1, $third_revertable_sha1);
+ isnt($second_revertable_sha1, $third_revertable_sha1);
+
+ # Move pages out of the anonymously-writable area. Reverting the
+ # earlier commits would now modify pages the anonymous user may not
+ # touch, which is exactly what CVE-2016-10026 exploited.
+ run_git(['mv', 'doc/writable/one.mdwn', 'doc/one.mdwn']);
+ run_git(['mv', 'doc/writable/two.mdwn', 'two.mdwn']);
+ run_git(['commit', '-m', 'Rename files to test CVE-2016-10026']);
+ ok(! -e 't/tmp/out/writable/two/index.html');
+ ok(! -e 't/tmp/out/writable/one/index.html');
+ ok(-e 't/tmp/out/one/index.html');
+ my $sha1_before_revert = run_git(['rev-list', '--max-count=1', 'HEAD']);
+ isnt($sha1_before_revert, $third_revertable_sha1);
+
+ # Reverting the first commit must be refused: it would change a
+ # page that is now locked.
+ $content = run_cgi(method => 'post',
+ params => {
+ do => 'revert',
+ revertmessage => 'CVE-2016-10026',
+ rev => $first_revertable_sha1,
+ _submit => 'Revert',
+ _submitted_revert => '1',
+ },
+ );
+ like($content, qr{is locked and cannot be edited});
+ # The tree is left clean
+ run_git(['diff', '--exit-code']);
+ run_git(['diff', '--cached', '--exit-code']);
+ my $sha1 = run_git(['rev-list', '--max-count=1', 'HEAD']);
+ is($sha1, $sha1_before_revert);
+
+ ok(-e 't/tmp/out/one/index.html');
+ ok(! -e 't/tmp/in/doc/writable/one.mdwn');
+ ok(-e 't/tmp/in/doc/one.mdwn');
+ $content = readfile('t/tmp/out/one/index.html');
+ like($content, qr{This is new content for the first test page});
+
+ # Reverting the second commit must also be refused: the page it
+ # touched was renamed to a path outside srcdir's writable area.
+ $content = run_cgi(method => 'post',
+ params => {
+ do => 'revert',
+ revertmessage => 'CVE-2016-10026',
+ rev => $second_revertable_sha1,
+ _submit => 'Revert',
+ _submitted_revert => '1',
+ },
+ );
+ like($content, qr{you are not allowed to change two\.mdwn});
+ run_git(['diff', '--exit-code']);
+ run_git(['diff', '--cached', '--exit-code']);
+ $sha1 = run_git(['rev-list', '--max-count=1', 'HEAD']);
+ is($sha1, $sha1_before_revert);
+
+ ok(! -e 't/tmp/out/writable/two/index.html');
+ ok(! -e 't/tmp/out/two/index.html');
+ ok(! -e 't/tmp/in/doc/writable/two.mdwn');
+ ok(-e 't/tmp/in/two.mdwn');
+ $content = readfile('t/tmp/in/two.mdwn');
+ like($content, qr{Here is new content for the second page});
+
+ # This one can legitimately be reverted
+ $content = run_cgi(method => 'post',
+ params => {
+ do => 'revert',
+ revertmessage => 'not CVE-2016-10026',
+ rev => $third_revertable_sha1,
+ _submit => 'Revert',
+ _submitted_revert => '1',
+ },
+ );
+ # Success redirects to recentchanges, and the third page plus its
+ # attachment are restored to their pre-commit contents.
+ like($content, qr{^Status:\s*302\s}m);
+ like($content, qr{^Location:\s*http://example\.com/recentchanges/}m);
+ run_git(['diff', '--exit-code']);
+ run_git(['diff', '--cached', '--exit-code']);
+ ok(-e 't/tmp/out/writable/three/index.html');
+ $content = readfile('t/tmp/out/writable/three/index.html');
+ like($content, qr{This is the third test page});
+ $content = readfile('t/tmp/out/writable/three.bin');
+ like($content, qr{An attachment});
+}
+
+test();
+
+done_testing();
--- /dev/null
+# Manual-test driver: builds a throwaway git-backed wiki from the
+# source tree three levels up, makes two "web" commits and a rename,
+# rebuilds after each step, and opens the result in w3m for a human to
+# attempt the reverts described in index.mdwn.
+export PERL5LIB=$(CURDIR)/../../..
+ikiwiki = $(CURDIR)/../../../ikiwiki.in
+
+all:
+	rm -fr git html
+	mkdir -p git
+	echo "/doc/.ikiwiki/" > git/.gitignore
+	mkdir -p git/doc
+	mkdir -p git/doc/writable
+	cd git && git init
+	cp index.mdwn git/doc/index.mdwn
+	cp -a $(CURDIR)/../../../doc/wikiicons git/doc/
+	cp -a $(CURDIR)/../../../doc/recentchanges.mdwn git/doc/
+	echo "This is the first test page" > git/doc/writable/one.mdwn
+	echo "This is the second test page" > git/doc/writable/two.mdwn
+	cd git && git add . && git commit -m 'Initial commit'
+	$(ikiwiki) --setup ikiwiki.setup
+	echo "This is the first test page, it was first" > git/doc/writable/one.mdwn
+	cd git && git add . && git commit -m 'First web commit'
+	$(ikiwiki) --setup ikiwiki.setup
+	echo "This is the second test page, it came second" > git/doc/writable/two.mdwn
+	cd git && git add . && git commit -m 'Second web commit'
+	$(ikiwiki) --setup ikiwiki.setup
+	# Renaming the pages makes the earlier commits non-revertable by
+	# anonymous users (see CVE-2016-10026).
+	cd git && git mv doc/writable/one.mdwn doc/one.mdwn
+	cd git && git mv doc/writable/two.mdwn two.mdwn
+	cd git && git commit -m Rename
+	$(ikiwiki) --setup ikiwiki.setup
+	w3m html/index.html
--- /dev/null
+#!/usr/bin/perl
+# Setup file for the manual git-revert test: a w3m-mode wiki rendered
+# into ./html from ./git/doc, using templates and underlays from the
+# source tree three levels up.
+
+use Cwd qw(getcwd abs_path);
+my $pwd;
+my $src;
+
+BEGIN {
+ # Resolve paths before the setup hash below is built.
+ $pwd = getcwd;
+ $src = abs_path("$pwd/../../..");
+}
+
+use IkiWiki::Setup::Standard {
+ wikiname => "ikiwiki_manual_test_git_revert",
+ srcdir => "$pwd/git/doc",
+ destdir => "$pwd/html",
+ templatedir => "$src/templates",
+ underlaydirbase => "$src/underlays",
+ underlaydir => "$src/underlays/basewiki",
+ verbose => 1,
+ syslog => 0,
+ usedirs => 0,
+ # w3mmode lets the CGI run locally via w3m's $LIB mechanism,
+ # without a web server.
+ w3mmode => 1,
+ url => "file://$pwd/html",
+ cgiurl => 'file:///$LIB/ikiwiki-w3m.cgi/ikiwiki_manual_test_git_revert.cgi',
+ # Same permission model as the automated test: only writable/*
+ # is anonymously editable, everything else is locked.
+ add_plugins => [qw{recentchanges anonok lockedit}],
+ locked_pages => '!writable/*',
+ anonok_pagespec => 'writable/*',
+ disable_plugins => [qw{passwordauth emailauth openid}],
+ rcs => 'git',
+ gitorigin_branch => '',
+}
--- /dev/null
+== Git revert manual test instructions ==
+
+* Look at the recent changes, below, in w3m
+* Try to revert each of the two web commits
+* Expected result: both fail
+
+[[!inline pages="internal(recentchanges/change_*) and !*/Discussion"
+template=recentchanges show=0]]
font-size: 120%;
}
-.header {
+div.header, header.header {
font-weight: normal;
}
font-family: 'Lato', sans-serif;
}
-.header {
+div.header, header.header {
margin-bottom: 0.5em;
}
.pageheader .actions ul {