--- /dev/null
+#!/usr/bin/perl
+package IkiWiki::Plugin::git;
+
+use warnings;
+use strict;
+use IkiWiki;
+use Encode;
+use open qw{:utf8 :std};
+
+my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate Git sha1sums
+my $dummy_commit_msg = 'dummy commit'; # message to skip in recent changes
+
+sub import { #{{{
+ if (exists $IkiWiki::hooks{rcs}) {
+ error(gettext("cannot use multiple rcs plugins"));
+ }
+ hook(type => "checkconfig", id => "git", call => \&checkconfig);
+ hook(type => "getsetup", id => "git", call => \&getsetup);
+ hook(type => "rcs", id => "rcs_update", call => \&rcs_update);
+ hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit);
+ hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit);
+ hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged);
+ hook(type => "rcs", id => "rcs_add", call => \&rcs_add);
+ hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove);
+ hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename);
+ hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges);
+ hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff);
+ hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime);
+} #}}}
+
+sub checkconfig () { #{{{
+ if (! defined $config{gitorigin_branch}) {
+ $config{gitorigin_branch}="origin";
+ }
+ if (! defined $config{gitmaster_branch}) {
+ $config{gitmaster_branch}="master";
+ }
+ if (defined $config{git_wrapper} && length $config{git_wrapper}) {
+ push @{$config{wrappers}}, {
+ wrapper => $config{git_wrapper},
+ wrappermode => (defined $config{git_wrappermode} ? $config{git_wrappermode} : "06755"),
+ };
+ }
+} #}}}
+
+sub getsetup () { #{{{
+ return
+ git_wrapper => {
+ type => "string",
+ example => "/git/wiki.git/hooks/post-update",
+ description => "git post-update executable to generate",
+ safe => 0, # file
+ rebuild => 0,
+ },
+ git_wrappermode => {
+ type => "string",
+ example => '06755',
+ description => "mode for git_wrapper (can safely be made suid)",
+ safe => 0,
+ rebuild => 0,
+ },
+ historyurl => {
+ type => "string",
+ example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=history;f=[[file]]",
+ description => "gitweb url to show file history ([[file]] substituted)",
+ safe => 1,
+ rebuild => 1,
+ },
+ diffurl => {
+ type => "string",
+ example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=blobdiff;h=[[sha1_to]];hp=[[sha1_from]];hb=[[sha1_parent]];f=[[file]]",
+ description => "gitweb url to show a diff ([[sha1_to]], [[sha1_from]], [[sha1_parent]], and [[file]] substituted)",
+ safe => 1,
+ rebuild => 1,
+ },
+ gitorigin_branch => {
+ type => "string",
+ example => "origin",
+ description => "where to pull and push changes (set to empty string to disable)",
+ safe => 0, # paranoia
+ rebuild => 0,
+ },
+ gitmaster_branch => {
+ type => "string",
+ example => "master",
+ description => "branch that the wiki is stored in",
+ safe => 0, # paranoia
+ rebuild => 0,
+ },
+} #}}}
+
+sub safe_git (&@) { #{{{
+	# Start a child process safely without resorting to /bin/sh.
+	# Return the command's output lines (in list context) or its
+	# success state (in scalar context).
+
+ my ($error_handler, @cmdline) = @_;
+
+ my $pid = open my $OUT, "-|";
+
+ error("Cannot fork: $!") if !defined $pid;
+
+ if (!$pid) {
+ # In child.
+	# Git commands want to be run in the working copy.
+ chdir $config{srcdir}
+ or error("Cannot chdir to $config{srcdir}: $!");
+ exec @cmdline or error("Cannot exec '@cmdline': $!");
+ }
+ # In parent.
+
+ my @lines;
+ while (<$OUT>) {
+ chomp;
+ push @lines, $_;
+ }
+
+ close $OUT;
+
+ $error_handler->("'@cmdline' failed: $!") if $? && $error_handler;
+
+ return wantarray ? @lines : ($? == 0);
+}
+# Convenient wrappers.
+sub run_or_die ($@) { safe_git(\&error, @_) }
+sub run_or_cry ($@) { safe_git(sub { warn @_ }, @_) }
+sub run_or_non ($@) { safe_git(undef, @_) }
+#}}}
+
+sub merge_past ($$$) { #{{{
+	# Unlike Subversion, Git has no direct equivalent of 'svn merge
+	# -rN:M file'. Git merge commands work with the committed changes,
+	# except in the implicit case of '-m' of git checkout(1), so we have
+	# to resort to a kludge here: create a throw-away branch in which to
+	# prepare the merge. Since branches are cheap (and branching is
+	# fast), this shouldn't cost much.
+ #
+ # The main problem is the presence of _uncommitted_ local changes. One
+ # possible approach to get rid of this situation could be that we first
+ # make a temporary commit in the master branch and later restore the
+ # initial state (this is possible since Git has the ability to undo a
+ # commit, i.e. 'git reset --soft HEAD^'). The method can be summarized
+ # as follows:
+ #
+ # - create a diff of HEAD:current-sha1
+ # - dummy commit
+ # - create a dummy branch and switch to it
+ # - rewind to past (reset --hard to the current-sha1)
+ # - apply the diff and commit
+ # - switch to master and do the merge with the dummy branch
+ # - make a soft reset (undo the last commit of master)
+ #
+	# The above method has some drawbacks: (1) it needs a redundant commit
+	# just to get rid of local changes, and (2) it is somewhat slow because
+	# of the required system forks. Until someone points out a more
+	# straightforward method (for which I would be grateful), an
+	# alternative method is implemented here.
+ # In this approach, we hide all the modified files from Git by renaming
+ # them (using the 'rename' builtin) and later restore those files in
+ # the throw-away branch (that is, we put the files themselves instead
+ # of applying a patch).
+
+ my ($sha1, $file, $message) = @_;
+
+ my @undo; # undo stack for cleanup in case of an error
+ my $conflict; # file content with conflict markers
+
+ eval {
+ # Hide local changes from Git by renaming the modified file.
+ # Relative paths must be converted to absolute for renaming.
+ my ($target, $hidden) = (
+ "$config{srcdir}/${file}", "$config{srcdir}/${file}.${sha1}"
+ );
+ rename($target, $hidden)
+ or error("rename '$target' to '$hidden' failed: $!");
+ # Ensure to restore the renamed file on error.
+ push @undo, sub {
+ return if ! -e "$hidden"; # already renamed
+ rename($hidden, $target)
+ or warn "rename '$hidden' to '$target' failed: $!";
+ };
+
+ my $branch = "throw_away_${sha1}"; # supposed to be unique
+
+ # Create a throw-away branch and rewind backward.
+ push @undo, sub { run_or_cry('git', 'branch', '-D', $branch) };
+ run_or_die('git', 'branch', $branch, $sha1);
+
+ # Switch to throw-away branch for the merge operation.
+ push @undo, sub {
+ if (!run_or_cry('git', 'checkout', $config{gitmaster_branch})) {
+ run_or_cry('git', 'checkout','-f',$config{gitmaster_branch});
+ }
+ };
+ run_or_die('git', 'checkout', $branch);
+
+ # Put the modified file in _this_ branch.
+ rename($hidden, $target)
+ or error("rename '$hidden' to '$target' failed: $!");
+
+ # _Silently_ commit all modifications in the current branch.
+ run_or_non('git', 'commit', '-m', $message, '-a');
+ # ... and re-switch to master.
+ run_or_die('git', 'checkout', $config{gitmaster_branch});
+
+ # Attempt to merge without complaining.
+ if (!run_or_non('git', 'pull', '--no-commit', '.', $branch)) {
+ $conflict = readfile($target);
+ run_or_die('git', 'reset', '--hard');
+ }
+ };
+ my $failure = $@;
+
+ # Process undo stack (in reverse order). By policy cleanup
+ # actions should normally print a warning on failure.
+ while (my $handle = pop @undo) {
+ $handle->();
+ }
+
+ error("Git merge failed!\n$failure\n") if $failure;
+
+ return $conflict;
+} #}}}
+
+sub parse_diff_tree ($@) { #{{{
+ # Parse the raw diff tree chunk and return the info hash.
+ # See git-diff-tree(1) for the syntax.
+
+ my ($prefix, $dt_ref) = @_;
+
+ # End of stream?
+	return if !defined $dt_ref || !@{ $dt_ref } ||
+		!defined $dt_ref->[0] || !length $dt_ref->[0];
+
+ my %ci;
+ # Header line.
+ while (my $line = shift @{ $dt_ref }) {
+ return if $line !~ m/^(.+) ($sha1_pattern)/;
+
+ my $sha1 = $2;
+ $ci{'sha1'} = $sha1;
+ last;
+ }
+
+ # Identification lines for the commit.
+ while (my $line = shift @{ $dt_ref }) {
+ # Regexps are semi-stolen from gitweb.cgi.
+ if ($line =~ m/^tree ([0-9a-fA-F]{40})$/) {
+ $ci{'tree'} = $1;
+ }
+ elsif ($line =~ m/^parent ([0-9a-fA-F]{40})$/) {
+ # XXX: collecting in reverse order
+ push @{ $ci{'parents'} }, $1;
+ }
+ elsif ($line =~ m/^(author|committer) (.*) ([0-9]+) (.*)$/) {
+ my ($who, $name, $epoch, $tz) =
+ ($1, $2, $3, $4 );
+
+ $ci{ $who } = $name;
+ $ci{ "${who}_epoch" } = $epoch;
+ $ci{ "${who}_tz" } = $tz;
+
+ if ($name =~ m/^[^<]+\s+<([^@>]+)/) {
+ $ci{"${who}_username"} = $1;
+ }
+ elsif ($name =~ m/^([^<]+)\s+<>$/) {
+ $ci{"${who}_username"} = $1;
+ }
+ else {
+ $ci{"${who}_username"} = $name;
+ }
+ }
+ elsif ($line =~ m/^$/) {
+ # Trailing empty line signals next section.
+ last;
+ }
+ }
+
+ debug("No 'tree' seen in diff-tree output") if !defined $ci{'tree'};
+
+ if (defined $ci{'parents'}) {
+ $ci{'parent'} = @{ $ci{'parents'} }[0];
+ }
+ else {
+ $ci{'parent'} = 0 x 40;
+ }
+
+ # Commit message (optional).
+	while ($dt_ref->[0] =~ /^    /) {
+		my $line = shift @{ $dt_ref };
+		$line =~ s/^    //;
+ push @{ $ci{'comment'} }, $line;
+ }
+ shift @{ $dt_ref } if $dt_ref->[0] =~ /^$/;
+
+ # Modified files.
+ while (my $line = shift @{ $dt_ref }) {
+ if ($line =~ m{^
+ (:+) # number of parents
+ ([^\t]+)\t # modes, sha1, status
+ (.*) # file names
+ $}xo) {
+ my $num_parents = length $1;
+ my @tmp = split(" ", $2);
+ my ($file, $file_to) = split("\t", $3);
+ my @mode_from = splice(@tmp, 0, $num_parents);
+ my $mode_to = shift(@tmp);
+ my @sha1_from = splice(@tmp, 0, $num_parents);
+ my $sha1_to = shift(@tmp);
+ my $status = shift(@tmp);
+
+ if ($file =~ m/^"(.*)"$/) {
+ ($file=$1) =~ s/\\([0-7]{1,3})/chr(oct($1))/eg;
+ }
+ $file =~ s/^\Q$prefix\E//;
+ if (length $file) {
+ push @{ $ci{'details'} }, {
+ 'file' => decode_utf8($file),
+ 'sha1_from' => $sha1_from[0],
+ 'sha1_to' => $sha1_to,
+ };
+ }
+ next;
+ };
+ last;
+ }
+
+ return \%ci;
+} #}}}
+
+sub git_commit_info ($;$) { #{{{
+ # Return an array of commit info hashes of num commits (default: 1)
+ # starting from the given sha1sum.
+
+ my ($sha1, $num) = @_;
+
+ $num ||= 1;
+
+ my @raw_lines = run_or_die('git', 'log', "--max-count=$num",
+ '--pretty=raw', '--raw', '--abbrev=40', '--always', '-c',
+ '-r', $sha1, '--', '.');
+ my ($prefix) = run_or_die('git', 'rev-parse', '--show-prefix');
+
+ my @ci;
+ while (my $parsed = parse_diff_tree(($prefix or ""), \@raw_lines)) {
+ push @ci, $parsed;
+ }
+
+ warn "Cannot parse commit info for '$sha1' commit" if !@ci;
+
+ return wantarray ? @ci : $ci[0];
+} #}}}
+
+sub git_sha1 (;$) { #{{{
+ # Return head sha1sum (of given file).
+
+ my $file = shift || q{--};
+
+ # Ignore error since a non-existing file might be given.
+ my ($sha1) = run_or_non('git', 'rev-list', '--max-count=1', 'HEAD',
+ '--', $file);
+ if ($sha1) {
+ ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now
+	}
+	else {
+		debug("Empty sha1sum for '$file'.");
+	}
+ return defined $sha1 ? $sha1 : q{};
+} #}}}
+
+sub rcs_update () { #{{{
+ # Update working directory.
+
+ if (length $config{gitorigin_branch}) {
+ run_or_cry('git', 'pull', $config{gitorigin_branch});
+ }
+} #}}}
+
+sub rcs_prepedit ($) { #{{{
+ # Return the commit sha1sum of the file when editing begins.
+ # This will be later used in rcs_commit if a merge is required.
+
+ my ($file) = @_;
+
+ return git_sha1($file);
+} #}}}
+
+sub rcs_commit ($$$;$$) { #{{{
+ # Try to commit the page; returns undef on _success_ and
+ # a version of the page with the rcs's conflict markers on
+ # failure.
+
+ my ($file, $message, $rcstoken, $user, $ipaddr) = @_;
+
+ # Check to see if the page has been changed by someone else since
+ # rcs_prepedit was called.
+ my $cur = git_sha1($file);
+ my ($prev) = $rcstoken =~ /^($sha1_pattern)$/; # untaint
+
+ if (defined $cur && defined $prev && $cur ne $prev) {
+ my $conflict = merge_past($prev, $file, $dummy_commit_msg);
+ return $conflict if defined $conflict;
+ }
+
+ rcs_add($file);
+ return rcs_commit_staged($message, $user, $ipaddr);
+} #}}}
+
+sub rcs_commit_staged ($$$) {
+ # Commits all staged changes. Changes can be staged using rcs_add,
+ # rcs_remove, and rcs_rename.
+ my ($message, $user, $ipaddr)=@_;
+
+ # Set the commit author and email to the web committer.
+ my %env=%ENV;
+ if (defined $user || defined $ipaddr) {
+ my $u=defined $user ? $user : $ipaddr;
+ $ENV{GIT_AUTHOR_NAME}=$u;
+ $ENV{GIT_AUTHOR_EMAIL}="$u\@web";
+ }
+
+	$message = IkiWiki::possibly_foolish_untaint($message);
+	my @opts;
+	if ($message !~ /\S/) {
+		# Force git to allow empty commit messages.
+		# (If this version of git supports it.)
+		my ($version)=`git --version` =~ /git version (.*)/;
+		if ($version ge "1.5.4") {
+			push @opts, '--cleanup=verbatim';
+		}
+		else {
+			$message.=".";
+		}
+	}
+	push @opts, '-q';
+	# git commit returns non-zero if the file hasn't really been changed,
+	# so we ignore its exit status (hence run_or_non).
+	if (run_or_non('git', 'commit', @opts, '-m', $message)) {
+ if (length $config{gitorigin_branch}) {
+ run_or_cry('git', 'push', $config{gitorigin_branch});
+ }
+ }
+
+ %ENV=%env;
+ return undef; # success
+}
+
+sub rcs_add ($) { # {{{
+ # Add file to archive.
+
+ my ($file) = @_;
+
+ run_or_cry('git', 'add', $file);
+} #}}}
+
+sub rcs_remove ($) { # {{{
+ # Remove file from archive.
+
+ my ($file) = @_;
+
+ run_or_cry('git', 'rm', '-f', $file);
+} #}}}
+
+sub rcs_rename ($$) { # {{{
+ my ($src, $dest) = @_;
+
+ run_or_cry('git', 'mv', '-f', $src, $dest);
+} #}}}
+
+sub rcs_recentchanges ($) { #{{{
+ # List of recent changes.
+
+ my ($num) = @_;
+
+ eval q{use Date::Parse};
+ error($@) if $@;
+
+ my @rets;
+ foreach my $ci (git_commit_info('HEAD', $num)) {
+ # Skip redundant commits.
+ next if ($ci->{'comment'} && @{$ci->{'comment'}}[0] eq $dummy_commit_msg);
+
+ my ($sha1, $when) = (
+ $ci->{'sha1'},
+ $ci->{'author_epoch'}
+ );
+
+ my @pages;
+ foreach my $detail (@{ $ci->{'details'} }) {
+ my $file = $detail->{'file'};
+
+ my $diffurl = defined $config{'diffurl'} ? $config{'diffurl'} : "";
+ $diffurl =~ s/\[\[file\]\]/$file/go;
+ $diffurl =~ s/\[\[sha1_parent\]\]/$ci->{'parent'}/go;
+ $diffurl =~ s/\[\[sha1_from\]\]/$detail->{'sha1_from'}/go;
+ $diffurl =~ s/\[\[sha1_to\]\]/$detail->{'sha1_to'}/go;
+
+ push @pages, {
+ page => pagename($file),
+ diffurl => $diffurl,
+ };
+ }
+
+ my @messages;
+ my $pastblank=0;
+ foreach my $line (@{$ci->{'comment'}}) {
+ $pastblank=1 if $line eq '';
+ next if $pastblank && $line=~m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i;
+ push @messages, { line => $line };
+ }
+
+ my $user=$ci->{'author_username'};
+ my $web_commit = ($ci->{'author'} =~ /\@web>/);
+
+		# compatibility code for old web commit messages
+ if (! $web_commit &&
+ defined $messages[0] &&
+ $messages[0]->{line} =~ m/$config{web_commit_regexp}/) {
+ $user = defined $2 ? "$2" : "$3";
+ $messages[0]->{line} = $4;
+ $web_commit=1;
+ }
+
+ push @rets, {
+ rev => $sha1,
+ user => $user,
+ committype => $web_commit ? "web" : "git",
+ when => $when,
+ message => [@messages],
+ pages => [@pages],
+ } if @pages;
+
+ last if @rets >= $num;
+ }
+
+ return @rets;
+} #}}}
+
+sub rcs_diff ($) { #{{{
+ my $rev=shift;
+ my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint
+ my @lines;
+ foreach my $line (run_or_non("git", "show", $sha1)) {
+ if (@lines || $line=~/^diff --git/) {
+ push @lines, $line."\n";
+ }
+ }
+ if (wantarray) {
+ return @lines;
+ }
+ else {
+ return join("", @lines);
+ }
+} #}}}
+
+sub rcs_getctime ($) { #{{{
+ my $file=shift;
+ # Remove srcdir prefix
+ $file =~ s/^\Q$config{srcdir}\E\/?//;
+
+ my $sha1 = git_sha1($file);
+ my $ci = git_commit_info($sha1);
+ my $ctime = $ci->{'author_epoch'};
+ debug("ctime for '$file': ". localtime($ctime));
+
+ return $ctime;
+} #}}}
+
+1
hook(type => "preprocess", id => "foo", call => \&preprocess);
- Replace "foo" with the command name that will be used inside brackets for
- the preprocessor directive.
-
- Each time the directive is processed, the referenced function (`preprocess`
- in the example above) is called, and is passed named parameters. A "page"
- parameter gives the name of the page that embedded the preprocessor
- directive, while a "destpage" parameter gives the name of the page the
- content is going to (different for inlined pages), and a "preview"
- parameter is set to a true value if the page is being previewed. All
- parameters included in the directive are included as named parameters as
- well. Whatever the function returns goes onto the page in place of the
+ Replace "foo" with the command name that will be used for the preprocessor
+directive.
- An optional "scan" parameter, if set to a true value, makes the hook be
- called during the preliminary scan that ikiwiki makes of updated pages,
- before begining to render pages. This parameter should be set to true if
- the hook modifies data in `%links`. Note that doing so will make the hook
- be run twice per page build, so avoid doing it for expensive hooks. (As an
- optimisation, if your preprocessor hook is called in a void contets, you
- can assume it's being run in scan mode.)
+ Each time the directive is processed, the referenced function (`preprocess`
+ in the example above) is called. Whatever the function returns goes onto
+ the page in place of the directive. Or, if the function aborts using
+ `error()`, the directive will be replaced with the error message.
+
+ The function is passed named parameters. First come the parameters set
+ in the preprocessor directive. These are passed in the same order as
+ they're in the directive, and if the preprocessor directive contains a bare
+ parameter (example: `\[[!foo param]]`), that parameter will be passed with
+ an empty value.
+
+ After the parameters from the preprocessor directive some additional ones
+ are passed: A "page" parameter gives the name of the page that embedded the
+ preprocessor directive, while a "destpage" parameter gives the name of the
+ page the content is going to (different for inlined pages), and a "preview"
+ parameter is set to a true value if the page is being previewed.
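+
+Putting this together, a minimal preprocess function might look like the
+following sketch (the `color` parameter is just a hypothetical directive
+parameter, not something ikiwiki itself defines):
+
+	sub preprocess (@) {
+		my %params=@_;
+		# Directive parameters plus page, destpage, and preview.
+		my $color=defined $params{color} ? $params{color} : "red";
+		return "foo was called on ".$params{destpage}." with color ".$color;
+	}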
+
+ If `hook` is passed an optional "scan" parameter, set to a true value, this
+ makes the hook be called during the preliminary scan that ikiwiki makes of
+updated pages, before beginning to render pages. This should be done if the
+ hook modifies data in `%links`. Note that doing so will make the hook be
+ run twice per page build, so avoid doing it for expensive hooks. (As an
+ optimisation, if your preprocessor hook is called in a void context, you
+ can assume it's being run in scan mode, and avoid doing expensive things at
+ that point.)
Note that if the [[htmlscrubber]] is enabled, html in
[[ikiwiki/PreProcessorDirective]] output is sanitised, which may limit what
`newpage`, and `content`, and should try to modify the content to reflect
the name change. For example, by converting links to point to the new page.
+### getsetup
+
+ hook(type => "getsetup", id => "foo", call => \&getsetup);
+
+This hook is not called during normal operation, but only when setting up
+the wiki, or generating a setup file. Plugins can use this hook to add
+configuration options.
+
+The hook is passed no parameters. It returns data about the configuration
+options added by the plugin. It can also check if the plugin is usable, and
+die if not, which will cause the plugin to not be offered in the configuration
+interface.
+
+The data returned is a list of `%config` options, each followed by a hash
+describing the option. For example:
+
+ return
+ option_foo => {
+ type => "boolean",
+ description => "enable foo",
+ safe => 1,
+ rebuild => 1,
+ },
+ option_bar => {
+ type => "string",
+ example => "hello",
+ description => "what to say",
+ safe => 1,
+ rebuild => 0,
+ },
+
+* `type` can be "boolean", "string", "integer", "pagespec", or "internal"
+ (used for values that are not user-visible). The type is the type of
+ the leaf values; the `%config` option may be an array or hash of these.
+* `example` can be set to an example value.
+* `description` is a short description of the option.
+* `safe` should be false if the option should not be displayed in unsafe
+ configuration methods, such as the web interface. Anything that specifies
+ a command to run, a path on disk, or a regexp should be marked as unsafe.
+* `rebuild` should be true if changing the option will require a wiki rebuild.
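+
+Since the hook may also die to signal that the plugin cannot be used, a
+plugin that depends on an external program could do something like this
+sketch (the `frobnicate` program is purely hypothetical):
+
+	sub getsetup () {
+		# error() dies, so the plugin won't be offered for configuration.
+		error("frobnicate is not installed")
+			unless -x "/usr/bin/frobnicate";
+		return
+			option_foo => {
+				type => "boolean",
+				description => "enable foo",
+				safe => 1,
+				rebuild => 1,
+			};
+	}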
+
## Plugin interface
To import the ikiwiki plugin interface:
A plugin can access the wiki's configuration via the `%config`
hash. The best way to understand the contents of the hash is to look at
-[[ikiwiki.setup]], which sets the hash content to configure the wiki.
+your ikiwiki setup file, which sets the hash content to configure the wiki.
### %pagestate
### RCS plugins
-ikiwiki's support for [[revision_control_systems|rcs]] also uses pluggable
-perl modules. These are in the `IkiWiki::RCS` namespace, for example
-`IkiWiki::RCS::svn`.
+ikiwiki's support for [[revision_control_systems|rcs]] is also done via
+plugins. See [[RCS_details|rcs/details]] for some more info.
+
+RCS plugins must register a number of hooks. Each hook has type 'rcs',
+and the 'id' field is set to the name of the hook. For example:
+
+ hook(type => "rcs", id => "rcs_update", call => \&rcs_update);
+ hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit);
+
+#### `rcs_update()`
+
+Updates the working directory with any remote changes.
+
+#### `rcs_prepedit($)`
+
+Is passed a file to prepare to edit. It can generate and return an arbitrary
+token that will be passed into `rcs_commit` when committing. For example,
+it might return the current revision ID of the file, and use that
+information later when merging changes.
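+
+The git plugin above does exactly this: its `rcs_prepedit` simply returns
+`git_sha1($file)`, and `rcs_commit` later merges in `merge_past` if that
+sha1 no longer matches the file's current one.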
+
+#### `rcs_commit($$$;$$)`
+
+Passed a file, message, token (from `rcs_prepedit`), user, and ip address.
+Should try to commit the file. Returns `undef` on *success* and a version
+of the page with the rcs's conflict markers on failure.
+
+#### `rcs_commit_staged($$$)`
+
+Passed a message, user, and ip address. Should commit all staged changes.
+Returns undef on success, and an error message on failure.
+
+Changes can be staged by calls to `rcs_add`, `rcs_remove`, and
+`rcs_rename`.
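+
+From the calling side, the staging functions and the commit pair up roughly
+like this sketch (the page names are made up for illustration):
+
+	rcs_add("index.mdwn");
+	rcs_remove("old/page.mdwn");
+	my $error = rcs_commit_staged("moved old/page to index", $user, $ipaddr);
+	error($error) if defined $error;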
+
+#### `rcs_add($)`
+
+Adds the passed file to the archive. The filename is relative to the root
+of the srcdir.
+
+Note that this should not check the new file in, it should only
+prepare for it to be checked in when `rcs_commit` (or `rcs_commit_staged`) is
+called. Note that the file may be in a new subdir that is not yet added
+to version control; the subdir can be added if so.
+
+#### `rcs_remove($)`
+
+Remove a file. The filename is relative to the root of the srcdir.
+
+Note that this should not check the removal in, it should only prepare for it
+to be checked in when `rcs_commit` (or `rcs_commit_staged`) is called. Note
+that the file may be in a new subdir that is not yet in version
+control; the subdir can be added if so.
+
+#### `rcs_rename($$)`
+
+Rename a file. The filenames are relative to the root of the srcdir.
+
+Note that this should not commit the rename, it should only
+prepare it for when `rcs_commit` (or `rcs_commit_staged`) is called.
+The new filename may be in a new subdir that is not yet added to
+version control. If so, the subdir will exist already, and should
+be added to version control.
+
+#### `rcs_recentchanges($)`
+
+Examine the RCS history and generate a list of recent changes.
+The parameter is how many changes to return.
+
+The data structure returned for each change is:
+
+ {
+		rev => # the RCS's id for this commit
+ user => # name of user who made the change,
+ committype => # either "web" or the name of the rcs,
+ when => # time when the change was made,
+ message => [
+ { line => "commit message line 1" },
+ { line => "commit message line 2" },
+ # etc,
+ ],
+ pages => [
+ {
+ page => # name of page changed,
+ diffurl => # optional url to a diff of changes
+ },
+ # repeat for each page changed in this commit,
+ ],
+ }
+
+#### `rcs_diff($)`
+
+The parameter is the rev from `rcs_recentchanges`.
+Should return a list of lines of the diff (including \n) in list
+context, and the whole diff in scalar context.
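+
+In other words, assuming the diff has already been collected into `@lines`
+(one `\n`-terminated line per element), the return boils down to:
+
+	return wantarray ? @lines : join("", @lines);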
+
+#### `rcs_getctime($)`
-Each RCS plugin must support all the `IkiWiki::rcs_*` functions.
-See IkiWiki::RCS::Stub for the full list of functions. It's ok if
-`rcs_getctime` does nothing except for throwing an error.
+This is used to get the page creation time for a file from the RCS, by looking
+it up in the history.
-See [[RCS_details|rcs/details]] for some more info.
+It's ok if this is not implemented, and throws an error.
### PageSpec plugins
### Setup plugins
-The ikiwiki setup file is loaded using a pluggable mechanism. If you
-look at the top of [[ikiwiki.setup]], it starts with
-'use IkiWiki::Setup::Standard', and the rest of the file is passed to
-that module's import method.
+The ikiwiki setup file is loaded using a pluggable mechanism. If you look
+at the top of a setup file, it starts with 'use IkiWiki::Setup::Standard',
+and the rest of the file is passed to that module's import method.
It's possible to write other modules in the `IkiWiki::Setup::` namespace that
can be used to configure ikiwiki in different ways. These modules should,
when imported, populate `$IkiWiki::Setup::raw_setup` with a reference
-to a hash containing all the config items.
+to a hash containing all the config items. They should also implement a
+`gendump` function.
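+
+A skeleton for such a module might look like the following sketch (the
+`MyFormat` name is made up, and `gendump` is left unimplemented):
+
+	package IkiWiki::Setup::MyFormat;
+
+	use warnings;
+	use strict;
+
+	sub import {
+		my ($class, $setup) = @_;
+		# Hand the config items to ikiwiki as a hash reference.
+		$IkiWiki::Setup::raw_setup = $setup;
+	}
+
+	sub gendump {
+		# Should generate a setup file dump; omitted in this sketch.
+	}
+
+	1
+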
By the way, to parse an ikiwiki setup file, a program just needs to
do something like: