From: Joey Hess Date: Sun, 27 Jul 2008 03:22:42 +0000 (-0400) Subject: Merge branch 'master' into autoconfig X-Git-Tag: 2.60~165 X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/commitdiff_plain/4ef96e2d9994c407f0f2f46301eb91fab6b48a37?hp=ecc40d1769c84a2cca3070e848adfd8710f27d8b Merge branch 'master' into autoconfig --- diff --git a/.gitignore b/.gitignore index bb02bcb31..57991a732 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +ikiwiki.setup Makefile Makefile.old blib/* diff --git a/IkiWiki.pm b/IkiWiki.pm index 3f8a4bca3..0746ef24b 100644 --- a/IkiWiki.pm +++ b/IkiWiki.pm @@ -32,65 +32,349 @@ memoize("abs2rel"); memoize("pagespec_translate"); memoize("file_pruned"); +sub getsetup () { #{{{ + wikiname => { + type => "string", + default => "wiki", + description => "name of the wiki", + safe => 1, + rebuild => 1, + }, + srcdir => { + type => "string", + default => undef, + example => "$ENV{HOME}/wiki", + description => "where the source of the wiki is located", + safe => 0, # path + rebuild => 1, + }, + destdir => { + type => "string", + default => undef, + example => "/var/www/wiki", + description => "where to build the wiki", + safe => 0, # path + rebuild => 1, + }, + adminuser => { + type => "string", + default => [], + description => "user names of wiki admins", + safe => 1, + rebuild => 0, + }, + adminemail => { + type => "string", + default => undef, + example => 'me@example.com', + description => "contact email for wiki", + safe => 1, + rebuild => 0, + }, + url => { + type => "string", + default => '', + example => "http://example.com/wiki", + description => "base url to the wiki", + safe => 1, + rebuild => 1, + }, + cgiurl => { + type => "string", + default => '', + example => "http://example.com/wiki/ikiwiki.cgi", + description => "url to the ikiwiki.cgi", + safe => 1, + rebuild => 1, + }, + cgi_wrapper => { + type => "string", + default => '', + example => "/var/www/wiki/ikiwiki.cgi", + description => "cgi executable to generate", + safe => 0, # file + rebuild => 0, + }, + cgi_wrappermode => { + type => "string", + default => '06755', + description => "mode for cgi_wrapper (can safely be made suid)", + safe => 0, + rebuild => 0, + }, + rcs => { + type => "string", + default => '', + description => "rcs backend to use", + safe => 0, # don't allow overriding + rebuild => 0, + }, + default_plugins => { + type => "internal", + default => [qw{mdwn link inline htmlscrubber passwordauth + openid signinedit lockedit conditional + recentchanges parentlinks}], + description => "plugins to enable by default", + safe => 1, + rebuild => 1, + }, + add_plugins => { + type => "string", + default => [], + description => "plugins to add to the default configuration", + safe => 1, + rebuild => 1, + }, + disable_plugins => { + type => "string", + default => [], + description => "plugins to disable", + safe => 1, + rebuild => 1, + }, + templatedir => { + type => "string", + default => "$installdir/share/ikiwiki/templates", + description => "location of template files", + safe => 0, # path + rebuild => 1, + }, + underlaydir => { + type => "string", + default => "$installdir/share/ikiwiki/basewiki", + description => "base wiki source location", + safe => 0, # path + rebuild => 0, + }, + wrappers => { + type => "internal", + default => [], + description => "wrappers to generate", + safe => 0, + rebuild => 0, + }, + underlaydirs => { + type => "internal", + default => [], + description => "additional underlays to use", + safe => 0, + rebuild => 0, + }, + verbose => { + type => 
"boolean", + default => 0, + description => "display verbose messages when building?", + safe => 1, + rebuild => 0, + }, + syslog => { + type => "boolean", + default => 0, + description => "log to syslog?", + safe => 1, + rebuild => 0, + }, + usedirs => { + type => "boolean", + default => 1, + description => "create output files named page/index.html?", + safe => 0, # changing requires manual transition + rebuild => 1, + }, + prefix_directives => { + type => "boolean", + default => 0, + description => "use '!'-prefixed preprocessor directives?", + safe => 0, # changing requires manual transition + rebuild => 1, + }, + discussion => { + type => "boolean", + default => 1, + description => "enable Discussion pages?", + safe => 1, + rebuild => 1, + }, + default_pageext => { + type => "string", + default => "mdwn", + description => "extension to use for new pages", + safe => 0, # not sanitized + rebuild => 0, + }, + htmlext => { + type => "string", + default => "html", + description => "extension to use for html files", + safe => 0, # not sanitized + rebuild => 1, + }, + timeformat => { + type => "string", + default => '%c', + description => "strftime format string to display date", + safe => 1, + rebuild => 1, + }, + locale => { + type => "string", + default => undef, + example => "en_US.UTF-8", + description => "UTF-8 locale to use", + safe => 0, + rebuild => 1, + }, + sslcookie => { + type => "boolean", + default => 0, + description => "only send cookies over SSL connections?", + safe => 1, + rebuild => 0, + }, + userdir => { + type => "string", + default => "", + example => "users", + description => "put user pages below specified page", + safe => 1, + rebuild => 1, + }, + numbacklinks => { + type => "integer", + default => 10, + description => "how many backlinks to show before hiding excess (0 to show all)", + safe => 1, + rebuild => 1, + }, + hardlink => { + type => "boolean", + default => 0, + description => "attempt to hardlink source files? 
(optimisation for large files)", + safe => 0, # paranoia + rebuild => 0, + }, + umask => { + type => "integer", + description => "", + example => "022", + description => "force ikiwiki to use a particular umask", + safe => 0, # paranoia + rebuild => 0, + }, + libdir => { + type => "string", + default => "", + example => "$ENV{HOME}/.ikiwiki/", + description => "extra library and plugin directory", + safe => 0, # directory + rebuild => 0, + }, + ENV => { + type => "string", + default => {}, + description => "environment variables", + safe => 0, # paranoia + rebuild => 0, + }, + exclude => { + type => "string", + default => undef, + example => '\.wav$', + description => "regexp of source files to ignore", + safe => 0, # regexp + rebuild => 1, + }, + wiki_file_prune_regexps => { + type => "internal", + default => [qr/(^|\/)\.\.(\/|$)/, qr/^\./, qr/\/\./, + qr/\.x?html?$/, qr/\.ikiwiki-new$/, + qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//, + qr/(^|\/)_MTN\//, + qr/\.dpkg-tmp$/], + description => "regexps of source files to ignore", + safe => 0, + rebuild => 1, + }, + wiki_file_regexp => { + type => "internal", + default => qr/(^[-[:alnum:]_.:\/+]+$)/, + description => "regexp of legal source files", + safe => 0, + rebuild => 1, + }, + web_commit_regexp => { + type => "internal", + default => qr/^web commit (by (.*?(?=: |$))|from (\d+\.\d+\.\d+\.\d+)):?(.*)/, + description => "regexp to parse web commits from logs", + safe => 0, + rebuild => 0, + }, + cgi => { + type => "internal", + default => 0, + description => "run as a cgi", + safe => 0, + rebuild => 0, + }, + cgi_disable_uploads => { + type => "internal", + default => 1, + description => "whether CGI should accept file uploads", + safe => 0, + rebuild => 0, + }, + post_commit => { + type => "internal", + default => 0, + description => "run as a post-commit hook", + safe => 0, + rebuild => 0, + }, + rebuild => { + type => "internal", + default => 0, + description => "running in rebuild mode", + safe => 0, + rebuild => 0, + }, + refresh => { + type => "internal", + default => 0, + description => "running in refresh mode", + safe => 0, + rebuild => 0, + }, + getctime => { + type => "internal", + default => 0, + description => "running in getctime mode", + safe => 0, + rebuild => 0, + }, + w3mmode => { + type => "internal", + default => 0, + description => "running in w3mmode", + safe => 0, + rebuild => 0, + }, + setup => { + type => "internal", + default => undef, + description => "setup file to read", + safe => 0, + rebuild => 0, + }, +} #}}} + sub defaultconfig () { #{{{ - return - wiki_file_prune_regexps => [qr/(^|\/)\.\.(\/|$)/, qr/^\./, qr/\/\./, - qr/\.x?html?$/, qr/\.ikiwiki-new$/, - qr/(^|\/).svn\//, qr/.arch-ids\//, qr/{arch}\//, - qr/(^|\/)_MTN\//, - qr/\.dpkg-tmp$/], - wiki_file_regexp => qr/(^[-[:alnum:]_.:\/+]+$)/, - web_commit_regexp => qr/^web commit (by (.*?(?=: |$))|from (\d+\.\d+\.\d+\.\d+)):?(.*)/, - verbose => 0, - syslog => 0, - wikiname => "wiki", - default_pageext => "mdwn", - htmlext => "html", - cgi => 0, - post_commit => 0, - rcs => '', - url => '', - cgiurl => '', - historyurl => '', - diffurl => '', - rss => 0, - atom => 0, - allowrss => 0, - allowatom => 0, - discussion => 1, - rebuild => 0, - refresh => 0, - getctime => 0, - w3mmode => 0, - wrapper => undef, - wrappermode => undef, - svnpath => "trunk", - gitorigin_branch => "origin", - gitmaster_branch => "master", - srcdir => undef, - destdir => undef, - pingurl => [], - templatedir => "$installdir/share/ikiwiki/templates", - underlaydir => 
"$installdir/share/ikiwiki/basewiki", - underlaydirs => [], - setup => undef, - adminuser => undef, - adminemail => undef, - plugin => [qw{mdwn link inline htmlscrubber passwordauth openid - signinedit lockedit conditional recentchanges - parentlinks}], - libdir => undef, - timeformat => '%c', - locale => undef, - sslcookie => 0, - httpauth => 0, - userdir => "", - usedirs => 1, - numbacklinks => 10, - account_creation_password => "", - prefix_directives => 0, - hardlink => 0, - cgi_disable_uploads => 1, + my %s=getsetup(); + my @ret; + foreach my $key (keys %s) { + push @ret, $key, $s{$key}->{default}; + } + use Data::Dumper; + return @ret; } #}}} sub checkconfig () { #{{{ @@ -130,16 +414,13 @@ sub checkconfig () { #{{{ unless exists $config{wikistatedir}; if ($config{rcs}) { - eval qq{use IkiWiki::Rcs::$config{rcs}}; - if ($@) { - error("Failed to load RCS module IkiWiki::Rcs::$config{rcs}: $@"); - } + loadplugin($config{rcs}); } else { - require IkiWiki::Rcs::Stub; + loadplugin("norcs"); } - if (exists $config{umask}) { + if (defined $config{umask}) { umask(possibly_foolish_untaint($config{umask})); } @@ -148,12 +429,32 @@ sub checkconfig () { #{{{ return 1; } #}}} +sub listplugins () { #{{{ + my %ret; + + foreach my $dir (@INC, $config{libdir}) { + next unless defined $dir; + foreach my $file (glob("$dir/IkiWiki/Plugin/*.pm")) { + my ($plugin)=$file=~/.*\/(.*)\.pm$/; + $ret{$plugin}=1; + } + } + foreach my $dir ($config{libdir}, "$installdir/lib/ikiwiki") { + next unless defined $dir; + foreach my $file (glob("$dir/plugins/*")) { + $ret{basename($file)}=1 if -x $file; + } + } + + return keys %ret; +} #}}} + sub loadplugins () { #{{{ if (defined $config{libdir}) { unshift @INC, possibly_foolish_untaint($config{libdir}); } - loadplugin($_) foreach @{$config{plugin}}; + loadplugin($_) foreach @{$config{default_plugins}}, @{$config{add_plugins}}; run_hooks(getopt => sub { shift->() }); if (grep /^-/, @ARGV) { @@ -1124,6 +1425,46 @@ sub run_hooks ($$) { # {{{ return 1; } #}}} +sub rcs_update () { #{{{ + $hooks{rcs}{rcs_update}{call}->(@_); +} #}}} + +sub rcs_prepedit ($) { #{{{ + $hooks{rcs}{rcs_prepedit}{call}->(@_); +} #}}} + +sub rcs_commit ($$$;$$) { #{{{ + $hooks{rcs}{rcs_commit}{call}->(@_); +} #}}} + +sub rcs_commit_staged ($$$) { #{{{ + $hooks{rcs}{rcs_commit_staged}{call}->(@_); +} #}}} + +sub rcs_add ($) { #{{{ + $hooks{rcs}{rcs_add}{call}->(@_); +} #}}} + +sub rcs_remove ($) { #{{{ + $hooks{rcs}{rcs_remove}{call}->(@_); +} #}}} + +sub rcs_rename ($$) { #{{{ + $hooks{rcs}{rcs_rename}{call}->(@_); +} #}}} + +sub rcs_recentchanges ($) { #{{{ + $hooks{rcs}{rcs_recentchanges}{call}->(@_); +} #}}} + +sub rcs_diff ($) { #{{{ + $hooks{rcs}{rcs_diff}{call}->(@_); +} #}}} + +sub rcs_getctime ($) { #{{{ + $hooks{rcs}{rcs_getctime}{call}->(@_); +} #}}} + sub globlist_to_pagespec ($) { #{{{ my @globlist=split(' ', shift); diff --git a/IkiWiki/Plugin/aggregate.pm b/IkiWiki/Plugin/aggregate.pm index e000bc864..673668c0e 100644 --- a/IkiWiki/Plugin/aggregate.pm +++ b/IkiWiki/Plugin/aggregate.pm @@ -16,6 +16,7 @@ my %guids; sub import { #{{{ hook(type => "getopt", id => "aggregate", call => \&getopt); + hook(type => "getsetup", id => "aggregate", call => \&getsetup); hook(type => "checkconfig", id => "aggregate", call => \&checkconfig); hook(type => "needsbuild", id => "aggregate", call => \&needsbuild); hook(type => "preprocess", id => "aggregate", call => \&preprocess); @@ -37,6 +38,24 @@ sub getopt () { #{{{ ); } #}}} +sub getsetup () { #{{{ + return + aggregateinternal => { + type => 
"boolean", + example => 0, + description => "enable aggregation to internal pages?", + safe => 0, # enabling needs manual transition + rebuild => 0, + }, + aggregate_webtrigger => { + type => "boolean", + example => 0, + description => "allow aggregation to be triggered via the web?", + safe => 1, + rebuild => 0, + }, +} #}}} + sub checkconfig () { #{{{ if ($config{aggregate} && ! ($config{post_commit} && IkiWiki::commit_hook_enabled())) { diff --git a/IkiWiki/Plugin/amazon_s3.pm b/IkiWiki/Plugin/amazon_s3.pm index 187700f30..e181a84da 100644 --- a/IkiWiki/Plugin/amazon_s3.pm +++ b/IkiWiki/Plugin/amazon_s3.pm @@ -18,6 +18,7 @@ BEGIN { sub import { #{{{ hook(type => "getopt", id => "amazon_s3", call => \&getopt); + hook(type => "getsetup", id => "amazon_s3", call => \&getsetup); hook(type => "checkconfig", id => "amazon_s3", call => \&checkconfig); } # }}} @@ -39,6 +40,52 @@ sub getopt () { #{{{ }); } #}}} +sub getsetup () { #{{{ + return + amazon_s3_key_id => { + type => "string", + example => "XXXXXXXXXXXXXXXXXXXX", + description => "public access key id", + safe => 1, + rebuild => 0, + }, + amazon_s3_key_id => { + type => "string", + example => "$ENV{HOME}/.s3_key", + description => "file holding secret key (must not be readable by others!)", + safe => 0, # ikiwiki reads this file + rebuild => 0, + }, + amazon_s3_bucket => { + type => "string", + example => "mywiki", + description => "globally unique name of bucket to store wiki in", + safe => 1, + rebuild => 1, + }, + amazon_s3_prefix => { + type => "string", + example => "wiki/", + description => "a prefix to prepend to each page name", + safe => 1, + rebuild => 1, + }, + amazon_s3_location => { + type => "string", + example => "EU", + description => "which S3 datacenter to use (leave blank for default)", + safe => 1, + rebuild => 1, + }, + amazon_s3_dupindex => { + type => "boolean", + example => 0, + description => "store each index file twice? 
(allows urls ending in \"/index.html\" and \"/\")", + safe => 1, + rebuild => 1, + }, +} #}}} + sub checkconfig { #{{{ foreach my $field (qw{amazon_s3_key_id amazon_s3_key_file amazon_s3_bucket}) { diff --git a/IkiWiki/Plugin/anonok.pm b/IkiWiki/Plugin/anonok.pm index 1880516d5..7b966f845 100644 --- a/IkiWiki/Plugin/anonok.pm +++ b/IkiWiki/Plugin/anonok.pm @@ -6,9 +6,21 @@ use strict; use IkiWiki 2.00; sub import { #{{{ - hook(type => "canedit", id => "anonok", call => \&canedit,); + hook(type => "getsetup", id => "anonok", call => \&getsetup); + hook(type => "canedit", id => "anonok", call => \&canedit); } # }}} +sub getsetup () { #{{{ + return + anonok_pagespec => { + type => "string", + example => "*/discussion", + description => "PageSpec to limit which pages anonymous users can edit", + safe => 1, + rebuild => 0, + }, +} #}}} + sub canedit ($$$) { #{{{ my $page=shift; my $cgi=shift; diff --git a/IkiWiki/Plugin/attachment.pm b/IkiWiki/Plugin/attachment.pm index 720078be1..47e165251 100644 --- a/IkiWiki/Plugin/attachment.pm +++ b/IkiWiki/Plugin/attachment.pm @@ -6,11 +6,23 @@ use strict; use IkiWiki 2.00; sub import { #{{{ + hook(type => "getsetup", id => "attachment", call => \&getsetup); hook(type => "checkconfig", id => "attachment", call => \&checkconfig); hook(type => "formbuilder_setup", id => "attachment", call => \&formbuilder_setup); hook(type => "formbuilder", id => "attachment", call => \&formbuilder); } # }}} +sub getsetup () { #{{{ + return + virus_checker => { + type => "string", + example => "clamdscan -", + description => "virus checker program (reads STDIN, returns nonzero if virus found)", + safe => 0, # executed + rebuild => 0, + }, +} #}}} + sub check_canattach ($$;$) { #{{{ my $session=shift; my $dest=shift; # where it's going to be put, under the srcdir diff --git a/IkiWiki/Plugin/bzr.pm b/IkiWiki/Plugin/bzr.pm new file mode 100644 index 000000000..39227cbae --- /dev/null +++ b/IkiWiki/Plugin/bzr.pm @@ -0,0 +1,278 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::bzr; + +use warnings; +use strict; +use IkiWiki; +use Encode; +use open qw{:utf8 :std}; + +sub import { #{{{ + hook(type => "checkconfig", id => "bzr", call => \&checkconfig); + hook(type => "getsetup", id => "bzr", call => \&getsetup); + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); + hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); + hook(type => "rcs", id => "rcs_add", call => \&rcs_add); + hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); + hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); + hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); + hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); + hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); +} #}}} + +sub checkconfig () { #{{{ + if (! defined $config{diffurl}) { + $config{diffurl}=""; + } + if (defined $config{bzr_wrapper} && length $config{bzr_wrapper}) { + push @{$config{wrappers}}, { + wrapper => $config{bzr_wrapper}, + wrappermode => (defined $config{bzr_wrappermode} ? 
$config{bzr_wrappermode} : "06755"), + }; + } +} #}}} + +sub getsetup () { #{{{ + return + bzr_wrapper => { + type => "string", + #example => "", # FIXME add example + description => "bzr post-commit executable to generate", + safe => 0, # file + rebuild => 0, + }, + bzr_wrappermode => { + type => "string", + example => '06755', + description => "mode for bzr_wrapper (can safely be made suid)", + safe => 0, + rebuild => 0, + }, + historyurl => { + type => "string", + #example => "", # FIXME add example + description => "url to show file history, using loggerhead ([[file]] substituted)", + safe => 1, + rebuild => 1, + }, + diffurl => { + type => "string", + example => "http://example.com/revision?start_revid=[[r2]]#[[file]]-s", + description => "url to view a diff, using loggerhead ([[file]] and [[r2]] substituted)", + safe => 1, + rebuild => 1, + }, +} #}}} + +sub bzr_log ($) { #{{{ + my $out = shift; + my @infos = (); + my $key = undef; + + while (<$out>) { + my $line = $_; + my ($value); + if ($line =~ /^message:/) { + $key = "message"; + $infos[$#infos]{$key} = ""; + } + elsif ($line =~ /^(modified|added|renamed|renamed and modified|removed):/) { + $key = "files"; + unless (defined($infos[$#infos]{$key})) { $infos[$#infos]{$key} = ""; } + } + elsif (defined($key) and $line =~ /^ (.*)/) { + $infos[$#infos]{$key} .= "$1\n"; + } + elsif ($line eq "------------------------------------------------------------\n") { + $key = undef; + push (@infos, {}); + } + else { + chomp $line; + ($key, $value) = split /: +/, $line, 2; + $infos[$#infos]{$key} = $value; + } + } + close $out; + + return @infos; +} #}}} + +sub rcs_update () { #{{{ + my @cmdline = ("bzr", "update", "--quiet", $config{srcdir}); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } +} #}}} + +sub rcs_prepedit ($) { #{{{ + return ""; +} #}}} + +sub bzr_author ($$) { #{{{ + my ($user, $ipaddr) = @_; + + if (defined $user) { + return IkiWiki::possibly_foolish_untaint($user); + } + elsif (defined $ipaddr) { + return "Anonymous from ".IkiWiki::possibly_foolish_untaint($ipaddr); + } + else { + return "Anonymous"; + } +} #}}} + +sub rcs_commit ($$$;$$) { #{{{ + my ($file, $message, $rcstoken, $user, $ipaddr) = @_; + + $user = bzr_author($user, $ipaddr); + + $message = IkiWiki::possibly_foolish_untaint($message); + if (! length $message) { + $message = "no message given"; + } + + my @cmdline = ("bzr", "commit", "--quiet", "-m", $message, "--author", $user, + $config{srcdir}."/".$file); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } + + return undef; # success +} #}}} + +sub rcs_commit_staged ($$$) { + # Commits all staged changes. Changes can be staged using rcs_add, + # rcs_remove, and rcs_rename. + my ($message, $user, $ipaddr)=@_; + + $user = bzr_author($user, $ipaddr); + + $message = IkiWiki::possibly_foolish_untaint($message); + if (! 
length $message) { + $message = "no message given"; + } + + my @cmdline = ("bzr", "commit", "--quiet", "-m", $message, "--author", $user, + $config{srcdir}); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } + + return undef; # success +} #}}} + +sub rcs_add ($) { # {{{ + my ($file) = @_; + + my @cmdline = ("bzr", "add", "--quiet", "$config{srcdir}/$file"); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } +} #}}} + +sub rcs_remove ($) { # {{{ + my ($file) = @_; + + my @cmdline = ("bzr", "rm", "--force", "--quiet", "$config{srcdir}/$file"); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } +} #}}} + +sub rcs_rename ($$) { # {{{ + my ($src, $dest) = @_; + + my $parent = IkiWiki::dirname($dest); + if (system("bzr", "add", "--quiet", "$config{srcdir}/$parent") != 0) { + warn("bzr add $parent failed\n"); + } + + my @cmdline = ("bzr", "mv", "--quiet", "$config{srcdir}/$src", "$config{srcdir}/$dest"); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } +} #}}} + +sub rcs_recentchanges ($) { #{{{ + my ($num) = @_; + + my @cmdline = ("bzr", "log", "-v", "--show-ids", "--limit", $num, + $config{srcdir}); + open (my $out, "@cmdline |"); + + eval q{use Date::Parse}; + error($@) if $@; + + my @ret; + foreach my $info (bzr_log($out)) { + my @pages = (); + my @message = (); + + foreach my $msgline (split(/\n/, $info->{message})) { + push @message, { line => $msgline }; + } + + foreach my $file (split(/\n/, $info->{files})) { + my ($filename, $fileid) = ($file =~ /^(.*?) +([^ ]+)$/); + + # Skip directories + next if ($filename =~ /\/$/); + + # Skip source name in renames + $filename =~ s/^.* => //; + + my $diffurl = $config{'diffurl'}; + $diffurl =~ s/\[\[file\]\]/$filename/go; + $diffurl =~ s/\[\[file-id\]\]/$fileid/go; + $diffurl =~ s/\[\[r2\]\]/$info->{revno}/go; + + push @pages, { + page => pagename($filename), + diffurl => $diffurl, + }; + } + + my $user = $info->{"committer"}; + if (defined($info->{"author"})) { $user = $info->{"author"}; } + $user =~ s/\s*<.*>\s*$//; + $user =~ s/^\s*//; + + push @ret, { + rev => $info->{"revno"}, + user => $user, + committype => "bzr", + when => time - str2time($info->{"timestamp"}), + message => [@message], + pages => [@pages], + }; + } + + return @ret; +} #}}} + +sub rcs_getctime ($) { #{{{ + my ($file) = @_; + + # XXX filename passes through the shell here, should try to avoid + # that just in case + my @cmdline = ("bzr", "log", "--limit", '1', "$config{srcdir}/$file"); + open (my $out, "@cmdline |"); + + my @log = bzr_log($out); + + if (length @log < 1) { + return 0; + } + + eval q{use Date::Parse}; + error($@) if $@; + + my $ctime = str2time($log[0]->{"timestamp"}); + return $ctime; +} #}}} + +1 diff --git a/IkiWiki/Plugin/calendar.pm b/IkiWiki/Plugin/calendar.pm index aed087eed..6f1f9bd07 100644 --- a/IkiWiki/Plugin/calendar.pm +++ b/IkiWiki/Plugin/calendar.pm @@ -30,10 +30,22 @@ my $time=time; my @now=localtime($time); sub import { #{{{ - hook(type => "needsbuild", id => "version", call => \&needsbuild); + hook(type => "getsetup", id => "calendar", call => \&getsetup); + hook(type => "needsbuild", id => "calendar", call => \&needsbuild); hook(type => "preprocess", id => "calendar", call => \&preprocess); } #}}} +sub getsetup () { #{{{ + return + archivebase => { + type => "string", + example => "archives", + description => "base of the archives hierarchy", + safe => 1, + rebuild => 1, + }, +} #}}} + sub is_leap_year (@) { #{{{ my %params=@_; return ($params{year} % 4 == 0 && (($params{year} 
% 100 != 0) || $params{year} % 400 == 0)); diff --git a/IkiWiki/Plugin/git.pm b/IkiWiki/Plugin/git.pm new file mode 100644 index 000000000..b20793d86 --- /dev/null +++ b/IkiWiki/Plugin/git.pm @@ -0,0 +1,552 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::git; + +use warnings; +use strict; +use IkiWiki; +use Encode; +use open qw{:utf8 :std}; + +my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate Git sha1sums +my $dummy_commit_msg = 'dummy commit'; # message to skip in recent changes + +sub import { #{{{ + hook(type => "checkconfig", id => "git", call => \&checkconfig); + hook(type => "getsetup", id => "git", call => \&getsetup); + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); + hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); + hook(type => "rcs", id => "rcs_add", call => \&rcs_add); + hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); + hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); + hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); + hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); + hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); +} #}}} + +sub checkconfig () { #{{{ + if (! defined $config{diffurl}) { + $config{diffurl}=""; + } + if (! defined $config{gitorigin_branch}) { + $config{gitorigin_branch}="origin"; + } + if (! defined $config{gitmaster_branch}) { + $config{gitmaster_branch}="master"; + } + if (defined $config{git_wrapper} && length $config{git_wrapper}) { + push @{$config{wrappers}}, { + wrapper => $config{git_wrapper}, + wrappermode => (defined $config{git_wrappermode} ? $config{git_wrappermode} : "06755"), + }; + } +} #}}} + +sub getsetup () { #{{{ + return + git_wrapper => { + type => "string", + example => "/git/wiki.git/hooks/post-update", + description => "git post-update executable to generate", + safe => 0, # file + rebuild => 0, + }, + git_wrappermode => { + type => "string", + example => '06755', + description => "mode for git_wrapper (can safely be made suid)", + safe => 0, + rebuild => 0, + }, + historyurl => { + type => "string", + example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=history;f=[[file]]", + description => "gitweb url to show file history ([[file]] substituted)", + safe => 1, + rebuild => 1, + }, + diffurl => { + type => "string", + example => "http://git.example.com/gitweb.cgi?p=wiki.git;a=blobdiff;h=[[sha1_to]];hp=[[sha1_from]];hb=[[sha1_parent]];f=[[file]]", + description => "gitweb url to show a diff ([[sha1_to]], [[sha1_from]], [[sha1_parent]], and [[file]] substituted)", + safe => 1, + rebuild => 1, + }, + gitorigin_branch => { + type => "string", + example => "origin", + description => "where to pull and push changes (set to empty string to disable)", + safe => 0, # paranoia + rebuild => 0, + }, + gitmaster_branch => { + type => "string", + example => "master", + description => "branch that the wiki is stored in", + safe => 0, # paranoia + rebuild => 0, + }, +} #}}} + +sub safe_git (&@) { #{{{ + # Start a child process safely without resorting /bin/sh. + # Return command output or success state (in scalar context). + + my ($error_handler, @cmdline) = @_; + + my $pid = open my $OUT, "-|"; + + error("Cannot fork: $!") if !defined $pid; + + if (!$pid) { + # In child. + # Git commands want to be in wc. 
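+ # (That is, chdir into the srcdir before exec'ing the git command.)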
+ chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + exec @cmdline or error("Cannot exec '@cmdline': $!"); + } + # In parent. + + my @lines; + while (<$OUT>) { + chomp; + push @lines, $_; + } + + close $OUT; + + $error_handler->("'@cmdline' failed: $!") if $? && $error_handler; + + return wantarray ? @lines : ($? == 0); +} +# Convenient wrappers. +sub run_or_die ($@) { safe_git(\&error, @_) } +sub run_or_cry ($@) { safe_git(sub { warn @_ }, @_) } +sub run_or_non ($@) { safe_git(undef, @_) } +#}}} + +sub merge_past ($$$) { #{{{ + # Unlike with Subversion, Git cannot make a 'svn merge -rN:M file'. + # Git merge commands work with the committed changes, except in the + # implicit case of '-m' of git checkout(1). So we should invent a + # kludge here. In principle, we need to create a throw-away branch + # in preparing for the merge itself. Since branches are cheap (and + # branching is fast), this shouldn't cost high. + # + # The main problem is the presence of _uncommitted_ local changes. One + # possible approach to get rid of this situation could be that we first + # make a temporary commit in the master branch and later restore the + # initial state (this is possible since Git has the ability to undo a + # commit, i.e. 'git reset --soft HEAD^'). The method can be summarized + # as follows: + # + # - create a diff of HEAD:current-sha1 + # - dummy commit + # - create a dummy branch and switch to it + # - rewind to past (reset --hard to the current-sha1) + # - apply the diff and commit + # - switch to master and do the merge with the dummy branch + # - make a soft reset (undo the last commit of master) + # + # The above method has some drawbacks: (1) it needs a redundant commit + # just to get rid of local changes, (2) somewhat slow because of the + # required system forks. Until someone points a more straight method + # (which I would be grateful) I have implemented an alternative method. + # In this approach, we hide all the modified files from Git by renaming + # them (using the 'rename' builtin) and later restore those files in + # the throw-away branch (that is, we put the files themselves instead + # of applying a patch). + + my ($sha1, $file, $message) = @_; + + my @undo; # undo stack for cleanup in case of an error + my $conflict; # file content with conflict markers + + eval { + # Hide local changes from Git by renaming the modified file. + # Relative paths must be converted to absolute for renaming. + my ($target, $hidden) = ( + "$config{srcdir}/${file}", "$config{srcdir}/${file}.${sha1}" + ); + rename($target, $hidden) + or error("rename '$target' to '$hidden' failed: $!"); + # Ensure to restore the renamed file on error. + push @undo, sub { + return if ! -e "$hidden"; # already renamed + rename($hidden, $target) + or warn "rename '$hidden' to '$target' failed: $!"; + }; + + my $branch = "throw_away_${sha1}"; # supposed to be unique + + # Create a throw-away branch and rewind backward. + push @undo, sub { run_or_cry('git', 'branch', '-D', $branch) }; + run_or_die('git', 'branch', $branch, $sha1); + + # Switch to throw-away branch for the merge operation. + push @undo, sub { + if (!run_or_cry('git', 'checkout', $config{gitmaster_branch})) { + run_or_cry('git', 'checkout','-f',$config{gitmaster_branch}); + } + }; + run_or_die('git', 'checkout', $branch); + + # Put the modified file in _this_ branch. + rename($hidden, $target) + or error("rename '$hidden' to '$target' failed: $!"); + + # _Silently_ commit all modifications in the current branch. 
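+ # (run_or_non ignores the exit status, since there may be nothing to commit.)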
+ run_or_non('git', 'commit', '-m', $message, '-a'); + # ... and re-switch to master. + run_or_die('git', 'checkout', $config{gitmaster_branch}); + + # Attempt to merge without complaining. + if (!run_or_non('git', 'pull', '--no-commit', '.', $branch)) { + $conflict = readfile($target); + run_or_die('git', 'reset', '--hard'); + } + }; + my $failure = $@; + + # Process undo stack (in reverse order). By policy cleanup + # actions should normally print a warning on failure. + while (my $handle = pop @undo) { + $handle->(); + } + + error("Git merge failed!\n$failure\n") if $failure; + + return $conflict; +} #}}} + +sub parse_diff_tree ($@) { #{{{ + # Parse the raw diff tree chunk and return the info hash. + # See git-diff-tree(1) for the syntax. + + my ($prefix, $dt_ref) = @_; + + # End of stream? + return if !defined @{ $dt_ref } || + !defined @{ $dt_ref }[0] || !length @{ $dt_ref }[0]; + + my %ci; + # Header line. + while (my $line = shift @{ $dt_ref }) { + return if $line !~ m/^(.+) ($sha1_pattern)/; + + my $sha1 = $2; + $ci{'sha1'} = $sha1; + last; + } + + # Identification lines for the commit. + while (my $line = shift @{ $dt_ref }) { + # Regexps are semi-stolen from gitweb.cgi. + if ($line =~ m/^tree ([0-9a-fA-F]{40})$/) { + $ci{'tree'} = $1; + } + elsif ($line =~ m/^parent ([0-9a-fA-F]{40})$/) { + # XXX: collecting in reverse order + push @{ $ci{'parents'} }, $1; + } + elsif ($line =~ m/^(author|committer) (.*) ([0-9]+) (.*)$/) { + my ($who, $name, $epoch, $tz) = + ($1, $2, $3, $4 ); + + $ci{ $who } = $name; + $ci{ "${who}_epoch" } = $epoch; + $ci{ "${who}_tz" } = $tz; + + if ($name =~ m/^[^<]+\s+<([^@>]+)/) { + $ci{"${who}_username"} = $1; + } + elsif ($name =~ m/^([^<]+)\s+<>$/) { + $ci{"${who}_username"} = $1; + } + else { + $ci{"${who}_username"} = $name; + } + } + elsif ($line =~ m/^$/) { + # Trailing empty line signals next section. + last; + } + } + + debug("No 'tree' seen in diff-tree output") if !defined $ci{'tree'}; + + if (defined $ci{'parents'}) { + $ci{'parent'} = @{ $ci{'parents'} }[0]; + } + else { + $ci{'parent'} = 0 x 40; + } + + # Commit message (optional). + while ($dt_ref->[0] =~ /^ /) { + my $line = shift @{ $dt_ref }; + $line =~ s/^ //; + push @{ $ci{'comment'} }, $line; + } + shift @{ $dt_ref } if $dt_ref->[0] =~ /^$/; + + # Modified files. + while (my $line = shift @{ $dt_ref }) { + if ($line =~ m{^ + (:+) # number of parents + ([^\t]+)\t # modes, sha1, status + (.*) # file names + $}xo) { + my $num_parents = length $1; + my @tmp = split(" ", $2); + my ($file, $file_to) = split("\t", $3); + my @mode_from = splice(@tmp, 0, $num_parents); + my $mode_to = shift(@tmp); + my @sha1_from = splice(@tmp, 0, $num_parents); + my $sha1_to = shift(@tmp); + my $status = shift(@tmp); + + if ($file =~ m/^"(.*)"$/) { + ($file=$1) =~ s/\\([0-7]{1,3})/chr(oct($1))/eg; + } + $file =~ s/^\Q$prefix\E//; + if (length $file) { + push @{ $ci{'details'} }, { + 'file' => decode_utf8($file), + 'sha1_from' => $sha1_from[0], + 'sha1_to' => $sha1_to, + }; + } + next; + }; + last; + } + + return \%ci; +} #}}} + +sub git_commit_info ($;$) { #{{{ + # Return an array of commit info hashes of num commits (default: 1) + # starting from the given sha1sum. 
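+ # In scalar context only the first commit's info hash is returned.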
+ + my ($sha1, $num) = @_; + + $num ||= 1; + + my @raw_lines = run_or_die('git', 'log', "--max-count=$num", + '--pretty=raw', '--raw', '--abbrev=40', '--always', '-c', + '-r', $sha1, '--', '.'); + my ($prefix) = run_or_die('git', 'rev-parse', '--show-prefix'); + + my @ci; + while (my $parsed = parse_diff_tree(($prefix or ""), \@raw_lines)) { + push @ci, $parsed; + } + + warn "Cannot parse commit info for '$sha1' commit" if !@ci; + + return wantarray ? @ci : $ci[0]; +} #}}} + +sub git_sha1 (;$) { #{{{ + # Return head sha1sum (of given file). + + my $file = shift || q{--}; + + # Ignore error since a non-existing file might be given. + my ($sha1) = run_or_non('git', 'rev-list', '--max-count=1', 'HEAD', + '--', $file); + if ($sha1) { + ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now + } else { debug("Empty sha1sum for '$file'.") } + return defined $sha1 ? $sha1 : q{}; +} #}}} + +sub rcs_update () { #{{{ + # Update working directory. + + if (length $config{gitorigin_branch}) { + run_or_cry('git', 'pull', $config{gitorigin_branch}); + } +} #}}} + +sub rcs_prepedit ($) { #{{{ + # Return the commit sha1sum of the file when editing begins. + # This will be later used in rcs_commit if a merge is required. + + my ($file) = @_; + + return git_sha1($file); +} #}}} + +sub rcs_commit ($$$;$$) { #{{{ + # Try to commit the page; returns undef on _success_ and + # a version of the page with the rcs's conflict markers on + # failure. + + my ($file, $message, $rcstoken, $user, $ipaddr) = @_; + + # Check to see if the page has been changed by someone else since + # rcs_prepedit was called. + my $cur = git_sha1($file); + my ($prev) = $rcstoken =~ /^($sha1_pattern)$/; # untaint + + if (defined $cur && defined $prev && $cur ne $prev) { + my $conflict = merge_past($prev, $file, $dummy_commit_msg); + return $conflict if defined $conflict; + } + + rcs_add($file); + return rcs_commit_staged($message, $user, $ipaddr); +} #}}} + +sub rcs_commit_staged ($$$) { + # Commits all staged changes. Changes can be staged using rcs_add, + # rcs_remove, and rcs_rename. + my ($message, $user, $ipaddr)=@_; + + # Set the commit author and email to the web committer. + my %env=%ENV; + if (defined $user || defined $ipaddr) { + my $u=defined $user ? $user : $ipaddr; + $ENV{GIT_AUTHOR_NAME}=$u; + $ENV{GIT_AUTHOR_EMAIL}="$u\@web"; + } + + # git commit returns non-zero if file has not been really changed. + # so we should ignore its exit status (hence run_or_non). + $message = IkiWiki::possibly_foolish_untaint($message); + if (run_or_non('git', 'commit', '--cleanup=verbatim', + '-q', '-m', $message)) { + if (length $config{gitorigin_branch}) { + run_or_cry('git', 'push', $config{gitorigin_branch}); + } + } + + %ENV=%env; + return undef; # success +} + +sub rcs_add ($) { # {{{ + # Add file to archive. + + my ($file) = @_; + + run_or_cry('git', 'add', $file); +} #}}} + +sub rcs_remove ($) { # {{{ + # Remove file from archive. + + my ($file) = @_; + + run_or_cry('git', 'rm', '-f', $file); +} #}}} + +sub rcs_rename ($$) { # {{{ + my ($src, $dest) = @_; + + run_or_cry('git', 'mv', '-f', $src, $dest); +} #}}} + +sub rcs_recentchanges ($) { #{{{ + # List of recent changes. + + my ($num) = @_; + + eval q{use Date::Parse}; + error($@) if $@; + + my @rets; + foreach my $ci (git_commit_info('HEAD', $num)) { + # Skip redundant commits. 
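+ # (i.e. the throwaway commits that merge_past makes using $dummy_commit_msg).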
+ next if ($ci->{'comment'} && @{$ci->{'comment'}}[0] eq $dummy_commit_msg); + + my ($sha1, $when) = ( + $ci->{'sha1'}, + $ci->{'author_epoch'} + ); + + my @pages; + foreach my $detail (@{ $ci->{'details'} }) { + my $file = $detail->{'file'}; + + my $diffurl = $config{'diffurl'}; + $diffurl =~ s/\[\[file\]\]/$file/go; + $diffurl =~ s/\[\[sha1_parent\]\]/$ci->{'parent'}/go; + $diffurl =~ s/\[\[sha1_from\]\]/$detail->{'sha1_from'}/go; + $diffurl =~ s/\[\[sha1_to\]\]/$detail->{'sha1_to'}/go; + + push @pages, { + page => pagename($file), + diffurl => $diffurl, + }; + } + + my @messages; + my $pastblank=0; + foreach my $line (@{$ci->{'comment'}}) { + $pastblank=1 if $line eq ''; + next if $pastblank && $line=~m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i; + push @messages, { line => $line }; + } + + my $user=$ci->{'author_username'}; + my $web_commit = ($ci->{'author'} =~ /\@web>/); + + # compatability code for old web commit messages + if (! $web_commit && + defined $messages[0] && + $messages[0]->{line} =~ m/$config{web_commit_regexp}/) { + $user = defined $2 ? "$2" : "$3"; + $messages[0]->{line} = $4; + $web_commit=1; + } + + push @rets, { + rev => $sha1, + user => $user, + committype => $web_commit ? "web" : "git", + when => $when, + message => [@messages], + pages => [@pages], + } if @pages; + + last if @rets >= $num; + } + + return @rets; +} #}}} + +sub rcs_diff ($) { #{{{ + my $rev=shift; + my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint + my @lines; + foreach my $line (run_or_non("git", "show", $sha1)) { + if (@lines || $line=~/^diff --git/) { + push @lines, $line."\n"; + } + } + if (wantarray) { + return @lines; + } + else { + return join("", @lines); + } +} #}}} + +sub rcs_getctime ($) { #{{{ + my $file=shift; + # Remove srcdir prefix + $file =~ s/^\Q$config{srcdir}\E\/?//; + + my $sha1 = git_sha1($file); + my $ci = git_commit_info($sha1); + my $ctime = $ci->{'author_epoch'}; + debug("ctime for '$file': ". localtime($ctime)); + + return $ctime; +} #}}} + +1 diff --git a/IkiWiki/Plugin/graphviz.pm b/IkiWiki/Plugin/graphviz.pm index b13d15fa6..021aa6b23 100644 --- a/IkiWiki/Plugin/graphviz.pm +++ b/IkiWiki/Plugin/graphviz.pm @@ -9,7 +9,7 @@ use IkiWiki 2.00; use IPC::Open2; sub import { #{{{ - hook(type => "preprocess", id => "graph", call => \&graph); + hook(type => "preprocess", id => "graphviz", call => \&graph); } # }}} my %graphviz_programs = ( diff --git a/IkiWiki/Plugin/inline.pm b/IkiWiki/Plugin/inline.pm index 2f0901943..cdd0ab0dc 100644 --- a/IkiWiki/Plugin/inline.pm +++ b/IkiWiki/Plugin/inline.pm @@ -15,6 +15,7 @@ my $nested=0; sub import { #{{{ hook(type => "getopt", id => "inline", call => \&getopt); + hook(type => "getsetup", id => "inline", call => \&getsetup); hook(type => "checkconfig", id => "inline", call => \&checkconfig); hook(type => "sessioncgi", id => "inline", call => \&sessioncgi); hook(type => "preprocess", id => "inline", @@ -27,7 +28,6 @@ sub import { #{{{ # pings interrupting page builds. hook(type => "change", id => "inline", call => \&IkiWiki::pingurl); - } # }}} sub getopt () { #{{{ @@ -39,8 +39,50 @@ sub getopt () { #{{{ "atom!" => \$config{atom}, "allowrss!" => \$config{allowrss}, "allowatom!" 
=> \$config{allowatom}, + "pingurl=s" => sub { + push @{$config{pingurl}}, $_[1]; + }, ); -} +} #}}} + +sub getsetup () { #{{{ + return + rss => { + type => "boolean", + example => 0, + description => "enable rss feeds by default?", + safe => 1, + rebuild => 1, + }, + atom => { + type => "boolean", + example => 0, + description => "enable atom feeds by default?", + safe => 1, + rebuild => 1, + }, + allowrss => { + type => "boolean", + example => 0, + description => "allow rss feeds to be used?", + safe => 1, + rebuild => 1, + }, + allowatom => { + type => "boolean", + example => 0, + description => "allow atom feeds to be used?", + safe => 1, + rebuild => 1, + }, + pingurl => { + type => "string", + example => "http://rpc.technorati.com/rpc/ping", + description => "urls to ping (using XML-RPC) on feed update", + safe => 1, + rebuild => 0, + }, +} #}}} sub checkconfig () { #{{{ if (($config{rss} || $config{atom}) && ! length $config{url}) { @@ -52,6 +94,9 @@ sub checkconfig () { #{{{ if ($config{atom}) { push @{$config{wiki_file_prune_regexps}}, qr/\.atom$/; } + if (! exists $config{pingurl}) { + $config{pingurl}=[]; + } } #}}} sub format (@) { #{{{ diff --git a/IkiWiki/Plugin/mdwn.pm b/IkiWiki/Plugin/mdwn.pm index 11f3f0137..332325adc 100644 --- a/IkiWiki/Plugin/mdwn.pm +++ b/IkiWiki/Plugin/mdwn.pm @@ -7,9 +7,21 @@ use strict; use IkiWiki 2.00; sub import { #{{{ + hook(type => "getsetup", id => "mdwn", call => \&getsetup); hook(type => "htmlize", id => "mdwn", call => \&htmlize); } # }}} +sub getsetup () { #{{{ + return + multimarkdown => { + type => "boolean", + example => 0, + description => "enable multimarkdown features?", + safe => 1, + rebuild => 1, + }, +} #}}} + my $markdown_sub; sub htmlize (@) { #{{{ my %params=@_; @@ -25,13 +37,13 @@ sub htmlize (@) { #{{{ if (exists $config{multimarkdown} && $config{multimarkdown}) { eval q{use Text::MultiMarkdown}; if ($@) { - error(gettext("multimarkdown is enabled, but Text::MultiMarkdown is not installed")); + debug(gettext("multimarkdown is enabled, but Text::MultiMarkdown is not installed")); } $markdown_sub=sub { Text::MultiMarkdown::markdown(shift, {use_metadata => 0}); } } - else { + if (! defined $markdown_sub) { eval q{use Text::Markdown}; if (! $@) { if (Text::Markdown->can('markdown')) { diff --git a/IkiWiki/Plugin/mercurial.pm b/IkiWiki/Plugin/mercurial.pm new file mode 100644 index 000000000..738be8c32 --- /dev/null +++ b/IkiWiki/Plugin/mercurial.pm @@ -0,0 +1,255 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::mercurial; + +use warnings; +use strict; +use IkiWiki; +use Encode; +use open qw{:utf8 :std}; + +sub import { #{{{ + hook(type => "checkconfig", id => "mercurial", call => \&checkconfig); + hook(type => "getsetup", id => "mercurial", call => \&getsetup); + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); + hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); + hook(type => "rcs", id => "rcs_add", call => \&rcs_add); + hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); + hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); + hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); + hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); + hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); +} #}}} + +sub checkconfig () { #{{{ + if (! 
defined $config{diffurl}) { + $config{diffurl}=""; + } + if (exists $config{mercurial_wrapper} && length $config{mercurial_wrapper}) { + push @{$config{wrappers}}, { + wrapper => $config{mercurial_wrapper}, + wrappermode => (defined $config{mercurial_wrappermode} ? $config{mercurial_wrappermode} : "06755"), + }; + } +} #}}} + +sub getsetup () { #{{{ + return + mercurial_wrapper => { + type => "string", + #example => # FIXME add example + description => "mercurial post-commit executable to generate", + safe => 0, # file + rebuild => 0, + }, + mercurial_wrappermode => { + type => "string", + example => '06755', + description => "mode for mercurial_wrapper (can safely be made suid)", + safe => 0, + rebuild => 0, + }, + historyurl => { + type => "string", + example => "http://example.com:8000/log/tip/[[file]]", + description => "url to hg serve'd repository, to show file history ([[file]] substituted)", + safe => 1, + rebuild => 1, + }, + diffurl => { + type => "string", + example => "http://localhost:8000/?fd=[[r2]];file=[[file]]", + description => "url to hg serve'd repository, to show diff ([[file]] and [[r2]] substituted)", + safe => 1, + rebuild => 1, + }, +} #}}} + +sub mercurial_log ($) { #{{{ + my $out = shift; + my @infos; + + while (<$out>) { + my $line = $_; + my ($key, $value); + + if (/^description:/) { + $key = "description"; + $value = ""; + + # slurp everything as the description text + # until the next changeset + while (<$out>) { + if (/^changeset: /) { + $line = $_; + last; + } + + $value .= $_; + } + + local $/ = ""; + chomp $value; + $infos[$#infos]{$key} = $value; + } + + chomp $line; + ($key, $value) = split /: +/, $line, 2; + + if ($key eq "changeset") { + push @infos, {}; + + # remove the revision index, which is strictly + # local to the repository + $value =~ s/^\d+://; + } + + $infos[$#infos]{$key} = $value; + } + close $out; + + return @infos; +} #}}} + +sub rcs_update () { #{{{ + my @cmdline = ("hg", "-q", "-R", "$config{srcdir}", "update"); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } +} #}}} + +sub rcs_prepedit ($) { #{{{ + return ""; +} #}}} + +sub rcs_commit ($$$;$$) { #{{{ + my ($file, $message, $rcstoken, $user, $ipaddr) = @_; + + if (defined $user) { + $user = IkiWiki::possibly_foolish_untaint($user); + } + elsif (defined $ipaddr) { + $user = "Anonymous from ".IkiWiki::possibly_foolish_untaint($ipaddr); + } + else { + $user = "Anonymous"; + } + + $message = IkiWiki::possibly_foolish_untaint($message); + if (! length $message) { + $message = "no message given"; + } + + my @cmdline = ("hg", "-q", "-R", $config{srcdir}, "commit", + "-m", $message, "-u", $user); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } + + return undef; # success +} #}}} + +sub rcs_commit_staged ($$$) { + # Commits all staged changes. Changes can be staged using rcs_add, + # rcs_remove, and rcs_rename. 
+ my ($message, $user, $ipaddr)=@_; + + error("rcs_commit_staged not implemented for mercurial"); # TODO +} + +sub rcs_add ($) { # {{{ + my ($file) = @_; + + my @cmdline = ("hg", "-q", "-R", "$config{srcdir}", "add", "$config{srcdir}/$file"); + if (system(@cmdline) != 0) { + warn "'@cmdline' failed: $!"; + } +} #}}} + +sub rcs_remove ($) { # {{{ + my ($file) = @_; + + error("rcs_remove not implemented for mercurial"); # TODO +} #}}} + +sub rcs_rename ($$) { # {{{ + my ($src, $dest) = @_; + + error("rcs_rename not implemented for mercurial"); # TODO +} #}}} + +sub rcs_recentchanges ($) { #{{{ + my ($num) = @_; + + my @cmdline = ("hg", "-R", $config{srcdir}, "log", "-v", "-l", $num, + "--style", "default"); + open (my $out, "@cmdline |"); + + eval q{use Date::Parse}; + error($@) if $@; + + my @ret; + foreach my $info (mercurial_log($out)) { + my @pages = (); + my @message = (); + + foreach my $msgline (split(/\n/, $info->{description})) { + push @message, { line => $msgline }; + } + + foreach my $file (split / /,$info->{files}) { + my $diffurl = $config{'diffurl'}; + $diffurl =~ s/\[\[file\]\]/$file/go; + $diffurl =~ s/\[\[r2\]\]/$info->{changeset}/go; + + push @pages, { + page => pagename($file), + diffurl => $diffurl, + }; + } + + my $user = $info->{"user"}; + $user =~ s/\s*<.*>\s*$//; + $user =~ s/^\s*//; + + push @ret, { + rev => $info->{"changeset"}, + user => $user, + committype => "mercurial", + when => str2time($info->{"date"}), + message => [@message], + pages => [@pages], + }; + } + + return @ret; +} #}}} + +sub rcs_diff ($) { #{{{ + # TODO +} #}}} + +sub rcs_getctime ($) { #{{{ + my ($file) = @_; + + # XXX filename passes through the shell here, should try to avoid + # that just in case + my @cmdline = ("hg", "-R", $config{srcdir}, "log", "-v", "-l", '1', + "--style", "default", "$config{srcdir}/$file"); + open (my $out, "@cmdline |"); + + my @log = mercurial_log($out); + + if (length @log < 1) { + return 0; + } + + eval q{use Date::Parse}; + error($@) if $@; + + my $ctime = str2time($log[0]->{"date"}); + return $ctime; +} #}}} + +1 diff --git a/IkiWiki/Plugin/mirrorlist.pm b/IkiWiki/Plugin/mirrorlist.pm index 3997e6fef..f7c78fdee 100644 --- a/IkiWiki/Plugin/mirrorlist.pm +++ b/IkiWiki/Plugin/mirrorlist.pm @@ -6,9 +6,21 @@ use strict; use IkiWiki 2.00; sub import { #{{{ + hook(type => "getsetup", id => "mirrorlist", call => \&getsetup); hook(type => "pagetemplate", id => "mirrorlist", call => \&pagetemplate); } # }}} +sub getsetup () { #{{{ + return + mirrorlist => { + type => "string", + example => {}, + description => "list of mirrors", + safe => 1, + rebuild => 1, + }, +} #}}} + sub pagetemplate (@) { #{{{ my %params=@_; my $template=$params{template}; diff --git a/IkiWiki/Plugin/monotone.pm b/IkiWiki/Plugin/monotone.pm new file mode 100644 index 000000000..4b9be316a --- /dev/null +++ b/IkiWiki/Plugin/monotone.pm @@ -0,0 +1,692 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::monotone; + +use warnings; +use strict; +use IkiWiki; +use Monotone; +use Date::Parse qw(str2time); +use Date::Format qw(time2str); + +my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate sha1sums + +sub import { #{{{ + hook(type => "checkconfig", id => "monotone", call => \&checkconfig); + hook(type => "getsetup", id => "monotone", call => \&getsetup); + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); + hook(type => "rcs", id => "rcs_commit_staged", 
call => \&rcs_commit_staged); + hook(type => "rcs", id => "rcs_add", call => \&rcs_add); + hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); + hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); + hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); + hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); + hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); +} #}}} + +sub checkconfig () { #{{{ + if (!defined($config{mtnrootdir})) { + $config{mtnrootdir} = $config{srcdir}; + } + if (! -d "$config{mtnrootdir}/_MTN") { + error("Ikiwiki srcdir does not seem to be a Monotone workspace (or set the mtnrootdir)!"); + } + + my $child = open(MTN, "-|"); + if (! $child) { + open STDERR, ">/dev/null"; + exec("mtn", "version") || error("mtn version failed to run"); + } + + my $version=undef; + while () { + if (/^monotone (\d+\.\d+) /) { + $version=$1; + } + } + + close MTN || debug("mtn version exited $?"); + + if (!defined($version)) { + error("Cannot determine monotone version"); + } + if ($version < 0.38) { + error("Monotone version too old, is $version but required 0.38"); + } + + if (length $config{mtn_wrapper}) { + push @{$config{wrappers}}, { + wrapper => $config{mtn_wrapper}, + wrappermode => (defined $config{mtn_wrappermode} ? $config{mtn_wrappermode} : "06755"), + }; + } +} #}}} + +sub getsetup () { #{{{ + return + mtn_wrapper => { + type => "string", + example => "/srv/mtn/wiki/_MTN/ikiwiki-netsync-hook", + description => "monotone netsync hook executable to generate", + safe => 0, # file + rebuild => 0, + }, + mtn_wrappermode => { + type => "string", + example => '06755', + description => "mode for mtn_wrapper (can safely be made suid)", + safe => 0, + rebuild => 0, + }, + mtnkey => { + type => "string", + example => 'web@example.com', + description => "your monotone key", + safe => 1, + rebuild => 0, + }, + historyurl => { + type => "string", + example => "http://viewmtn.example.com/branch/head/filechanges/com.example.branch/[[file]]", + description => "viewmtn url to show file history ([[file]] substituted)", + safe => 1, + rebuild => 1, + }, + diffurl => { + type => "string", + example => "http://viewmtn.example.com/revision/diff/[[r1]]/with/[[r2]]/[[file]]", + description => "viewmtn url to show a diff ([[r1]], [[r2]], and [[file]] substituted)", + safe => 1, + rebuild => 1, + }, + mtnsync => { + type => "boolean", + example => 0, + description => "sync on update and commit?", + safe => 0, # paranoia + rebuild => 0, + }, + mtnrootdir => { + type => "string", + description => "path to your workspace (defaults to the srcdir; specify if the srcdir is a subdirectory of the workspace)", + safe => 0, # path + rebuild => 0, + }, +} #}}} + +sub get_rev () { #{{{ + my $sha1 = `mtn --root=$config{mtnrootdir} automate get_base_revision_id`; + + ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now + if (! $sha1) { + debug("Unable to get base revision for '$config{srcdir}'.") + } + + return $sha1; +} #}}} + +sub get_rev_auto ($) { #{{{ + my $automator=shift; + + my @results = $automator->call("get_base_revision_id"); + + my $sha1 = $results[0]; + ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now + if (! $sha1) { + debug("Unable to get base revision for '$config{srcdir}'.") + } + + return $sha1; +} #}}} + +sub mtn_merge ($$$$) { #{{{ + my $leftRev=shift; + my $rightRev=shift; + my $branch=shift; + my $author=shift; + + my $mergeRev; + + my $child = open(MTNMERGE, "-|"); + if (! 
$child) { + open STDERR, ">&STDOUT"; + exec("mtn", "--root=$config{mtnrootdir}", + "explicit_merge", $leftRev, $rightRev, + $branch, "--author", $author, "--key", + $config{mtnkey}) || error("mtn merge failed to run"); + } + + while () { + if (/^mtn.\s.merged.\s($sha1_pattern)$/) { + $mergeRev=$1; + } + } + + close MTNMERGE || return undef; + + debug("merged $leftRev, $rightRev to make $mergeRev"); + + return $mergeRev; +} #}}} + +sub commit_file_to_new_rev ($$$$$$$$) { #{{{ + my $automator=shift; + my $wsfilename=shift; + my $oldFileID=shift; + my $newFileContents=shift; + my $oldrev=shift; + my $branch=shift; + my $author=shift; + my $message=shift; + + #store the file + my ($out, $err) = $automator->call("put_file", $oldFileID, $newFileContents); + my ($newFileID) = ($out =~ m/^($sha1_pattern)$/); + error("Failed to store file data for $wsfilename in repository") + if (! defined $newFileID || length $newFileID != 40); + + # get the mtn filename rather than the workspace filename + ($out, $err) = $automator->call("get_corresponding_path", $oldrev, $wsfilename, $oldrev); + my ($filename) = ($out =~ m/^file "(.*)"$/); + error("Couldn't find monotone repository path for file $wsfilename") if (! $filename); + debug("Converted ws filename of $wsfilename to repos filename of $filename"); + + # then stick in a new revision for this file + my $manifest = "format_version \"1\"\n\n". + "new_manifest [0000000000000000000000000000000000000000]\n\n". + "old_revision [$oldrev]\n\n". + "patch \"$filename\"\n". + " from [$oldFileID]\n". + " to [$newFileID]\n"; + ($out, $err) = $automator->call("put_revision", $manifest); + my ($newRevID) = ($out =~ m/^($sha1_pattern)$/); + error("Unable to make new monotone repository revision") + if (! defined $newRevID || length $newRevID != 40); + debug("put revision: $newRevID"); + + # now we need to add certs for this revision... + # author, branch, changelog, date + $automator->call("cert", $newRevID, "author", $author); + $automator->call("cert", $newRevID, "branch", $branch); + $automator->call("cert", $newRevID, "changelog", $message); + $automator->call("cert", $newRevID, "date", + time2str("%Y-%m-%dT%T", time, "UTC")); + + debug("Added certs for rev: $newRevID"); + return $newRevID; +} #}}} + +sub read_certs ($$) { #{{{ + my $automator=shift; + my $rev=shift; + my @results = $automator->call("certs", $rev); + my @ret; + + my $line = $results[0]; + while ($line =~ m/\s+key\s"(.*?)"\nsignature\s"(ok|bad|unknown)"\n\s+name\s"(.*?)"\n\s+value\s"(.*?)"\n\s+trust\s"(trusted|untrusted)"\n/sg) { + push @ret, { + key => $1, + signature => $2, + name => $3, + value => $4, + trust => $5, + }; + } + + return @ret; +} #}}} + +sub get_changed_files ($$) { #{{{ + my $automator=shift; + my $rev=shift; + + my @results = $automator->call("get_revision", $rev); + my $changes=$results[0]; + + my @ret; + my %seen = (); + + while ($changes =~ m/\s*(add_file|patch|delete|rename)\s"(.*?)(?new(); + $automator->open_args("--root", $config{mtnrootdir}, "--key", $config{mtnkey}); + + # Something has been committed, has this file changed? + my ($out, $err); + $automator->setOpts("r", $oldrev, "r", $rev); + ($out, $err) = $automator->call("content_diff", $file); + debug("Problem committing $file") if ($err ne ""); + my $diff = $out; + + if ($diff) { + # Commit a revision with just this file changed off + # the old revision. 
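+ # (The new content and revision certs are written directly into the database via the automate interface.)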
+ # + # first get the contents + debug("File changed: forming branch"); + my $newfile=readfile("$config{srcdir}/$file"); + + # then get the old content ID from the diff + if ($diff !~ m/^---\s$file\s+($sha1_pattern)$/m) { + error("Unable to find previous file ID for $file"); + } + my $oldFileID = $1; + + # get the branch we're working in + ($out, $err) = $automator->call("get_option", "branch"); + chomp $out; + error("Illegal branch name in monotone workspace") if ($out !~ m/^([-\@\w\.]+)$/); + my $branch = $1; + + # then put the new content into the DB (and record the new content ID) + my $newRevID = commit_file_to_new_rev($automator, $file, $oldFileID, $newfile, $oldrev, $branch, $author, $message); + + $automator->close(); + + # if we made it to here then the file has been committed... revert the local copy + if (system("mtn", "--root=$config{mtnrootdir}", "revert", $file) != 0) { + debug("Unable to revert $file after merge on conflicted commit!"); + } + debug("Divergence created! Attempting auto-merge."); + + # see if it will merge cleanly + $ENV{MTN_MERGE}="fail"; + my $mergeResult = mtn_merge($newRevID, $rev, $branch, $author); + $ENV{MTN_MERGE}=""; + + # push any changes so far + if (defined($config{mtnsync}) && $config{mtnsync}) { + if (system("mtn", "--root=$config{mtnrootdir}", "push", "--quiet", "--ticker=none", "--key", $config{mtnkey}) != 0) { + debug("monotone push failed"); + } + } + + if (defined($mergeResult)) { + # everything is merged - bring outselves up to date + if (system("mtn", "--root=$config{mtnrootdir}", + "update", "-r", $mergeResult) != 0) { + debug("Unable to update to rev $mergeResult after merge on conflicted commit!"); + } + } + else { + debug("Auto-merge failed. Using diff-merge to add conflict markers."); + + $ENV{MTN_MERGE}="diffutils"; + $ENV{MTN_MERGE_DIFFUTILS}="partial=true"; + $mergeResult = mtn_merge($newRevID, $rev, $branch, $author); + $ENV{MTN_MERGE}=""; + $ENV{MTN_MERGE_DIFFUTILS}=""; + + if (!defined($mergeResult)) { + debug("Unable to insert conflict markers!"); + error("Your commit succeeded. Unfortunately, someone else committed something to the same ". + "part of the wiki at the same time. Both versions are stored in the monotone repository, ". + "but at present the different versions cannot be reconciled through the web interface. ". + "Please use the non-web interface to resolve the conflicts."); + } + + if (system("mtn", "--root=$config{mtnrootdir}", + "update", "-r", $mergeResult) != 0) { + debug("Unable to update to rev $mergeResult after conflict-enhanced merge on conflicted commit!"); + } + + # return "conflict enhanced" file to the user + # for cleanup note, this relies on the fact + # that ikiwiki seems to call rcs_prepedit() + # again after we return + return readfile("$config{srcdir}/$file"); + } + return undef; + } + $automator->close(); + } + + # If we reached here then the file we're looking at hasn't changed + # since $oldrev. Commit it. + + if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet", + "--author", $author, "--key", $config{mtnkey}, "-m", + IkiWiki::possibly_foolish_untaint($message), $file) != 0) { + debug("Traditional commit failed! 
Returning data as conflict."); + my $conflict=readfile("$config{srcdir}/$file"); + if (system("mtn", "--root=$config{mtnrootdir}", "revert", + "--quiet", $file) != 0) { + debug("monotone revert failed"); + } + return $conflict; + } + if (defined($config{mtnsync}) && $config{mtnsync}) { + if (system("mtn", "--root=$config{mtnrootdir}", "push", + "--quiet", "--ticker=none", "--key", + $config{mtnkey}) != 0) { + debug("monotone push failed"); + } + } + + return undef # success +} #}}} + +sub rcs_commit_staged ($$$) { + # Commits all staged changes. Changes can be staged using rcs_add, + # rcs_remove, and rcs_rename. + my ($message, $user, $ipaddr)=@_; + + # Note - this will also commit any spurious changes that happen to be + # lying around in the working copy. There shouldn't be any, but... + + chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + + my $author; + + if (defined $user) { + $author="Web user: " . $user; + } + elsif (defined $ipaddr) { + $author="Web IP: " . $ipaddr; + } + else { + $author="Web: Anonymous"; + } + + if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet", + "--author", $author, "--key", $config{mtnkey}, "-m", + IkiWiki::possibly_foolish_untaint($message)) != 0) { + error("Monotone commit failed"); + } +} + +sub rcs_add ($) { #{{{ + my $file=shift; + + chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + + if (system("mtn", "--root=$config{mtnrootdir}", "add", "--quiet", + $file) != 0) { + error("Monotone add failed"); + } +} #}}} + +sub rcs_remove ($) { # {{{ + my $file = shift; + + chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + + # Note: it is difficult to undo a remove in Monotone at the moment. + # Until this is fixed, it might be better to make 'rm' move things + # into an attic, rather than actually remove them. + # To resurrect a file, you currently add a new file with the contents + # you want it to have. This loses all connectivity and automated + # merging with the 'pre-delete' versions of the file. + + if (system("mtn", "--root=$config{mtnrootdir}", "rm", "--quiet", + $file) != 0) { + error("Monotone remove failed"); + } +} #}}} + +sub rcs_rename ($$) { # {{{ + my ($src, $dest) = @_; + + chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + + if (system("mtn", "--root=$config{mtnrootdir}", "rename", "--quiet", + $src, $dest) != 0) { + error("Monotone rename failed"); + } +} #}}} + +sub rcs_recentchanges ($) { #{{{ + my $num=shift; + my @ret; + + chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + + # use log --brief to get a list of revs, as this + # gives the results in a nice order + # (otherwise we'd have to do our own date sorting) + + my @revs; + + my $child = open(MTNLOG, "-|"); + if (! 
$child) { + exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", + "--brief") || error("mtn log failed to run"); + } + + while (($num >= 0) and (my $line = )) { + if ($line =~ m/^($sha1_pattern)/) { + push @revs, $1; + $num -= 1; + } + } + close MTNLOG || debug("mtn log exited $?"); + + my $automator = Monotone->new(); + $automator->open(undef, $config{mtnrootdir}); + + while (@revs != 0) { + my $rev = shift @revs; + # first go through and figure out the messages, etc + + my $certs = [read_certs($automator, $rev)]; + + my $user; + my $when; + my $committype; + my (@pages, @message); + + foreach my $cert (@$certs) { + if ($cert->{signature} eq "ok" && + $cert->{trust} eq "trusted") { + if ($cert->{name} eq "author") { + $user = $cert->{value}; + # detect the source of the commit + # from the changelog + if ($cert->{key} eq $config{mtnkey}) { + $committype = "web"; + } else { + $committype = "monotone"; + } + } elsif ($cert->{name} eq "date") { + $when = str2time($cert->{value}, 'UTC'); + } elsif ($cert->{name} eq "changelog") { + my $messageText = $cert->{value}; + # split the changelog into multiple + # lines + foreach my $msgline (split(/\n/, $messageText)) { + push @message, { line => $msgline }; + } + } + } + } + + my @changed_files = get_changed_files($automator, $rev); + my $file; + + my ($out, $err) = $automator->call("parents", $rev); + my @parents = ($out =~ m/^($sha1_pattern)$/); + my $parent = $parents[0]; + + foreach $file (@changed_files) { + next unless length $file; + + if (defined $config{diffurl} and (@parents == 1)) { + my $diffurl=$config{diffurl}; + $diffurl=~s/\[\[r1\]\]/$parent/g; + $diffurl=~s/\[\[r2\]\]/$rev/g; + $diffurl=~s/\[\[file\]\]/$file/g; + push @pages, { + page => pagename($file), + diffurl => $diffurl, + }; + } + else { + push @pages, { + page => pagename($file), + } + } + } + + push @ret, { + rev => $rev, + user => $user, + committype => $committype, + when => $when, + message => [@message], + pages => [@pages], + } if @pages; + } + + $automator->close(); + + return @ret; +} #}}} + +sub rcs_diff ($) { #{{{ + my $rev=shift; + my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint + + chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + + my $child = open(MTNDIFF, "-|"); + if (! $child) { + exec("mtn", "diff", "--root=$config{mtnrootdir}", "-r", "p:".$sha1, "-r", $sha1) || error("mtn diff $sha1 failed to run"); + } + + my (@lines) = ; + + close MTNDIFF || debug("mtn diff $sha1 exited $?"); + + if (wantarray) { + return @lines; + } + else { + return join("", @lines); + } +} #}}} + +sub rcs_getctime ($) { #{{{ + my $file=shift; + + chdir $config{srcdir} + or error("Cannot chdir to $config{srcdir}: $!"); + + my $child = open(MTNLOG, "-|"); + if (! $child) { + exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", + "--brief", $file) || error("mtn log $file failed to run"); + } + + my $firstRev; + while () { + if (/^($sha1_pattern)/) { + $firstRev=$1; + } + } + close MTNLOG || debug("mtn log $file exited $?"); + + if (! defined $firstRev) { + debug "failed to parse mtn log for $file"; + return 0; + } + + my $automator = Monotone->new(); + $automator->open(undef, $config{mtnrootdir}); + + my $certs = [read_certs($automator, $firstRev)]; + + $automator->close(); + + my $date; + + foreach my $cert (@$certs) { + if ($cert->{signature} eq "ok" && $cert->{trust} eq "trusted") { + if ($cert->{name} eq "date") { + $date = $cert->{value}; + } + } + } + + if (! 
defined $date) { + debug "failed to find date cert for revision $firstRev when looking for creation time of $file"; + return 0; + } + + $date=str2time($date, 'UTC'); + debug("found ctime ".localtime($date)." for $file"); + return $date; +} #}}} + +1 diff --git a/IkiWiki/Plugin/norcs.pm b/IkiWiki/Plugin/norcs.pm new file mode 100644 index 000000000..72c66569c --- /dev/null +++ b/IkiWiki/Plugin/norcs.pm @@ -0,0 +1,58 @@ +#!/usr/bin/perl +# Stubs for no revision control. +package IkiWiki::Plugin::norcs; + +use warnings; +use strict; +use IkiWiki; + +sub import { #{{{ + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); + hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); + hook(type => "rcs", id => "rcs_add", call => \&rcs_add); + hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); + hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); + hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); + hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); + hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); +} #}}} + +sub rcs_update () { #{{{ +} #}}} + +sub rcs_prepedit ($) { #{{{ + return "" +} #}}} + +sub rcs_commit ($$$;$$) { #{{{ + my ($file, $message, $rcstoken, $user, $ipaddr) = @_; + return undef # success +} #}}} + +sub rcs_commit_staged ($$$) { #{{{ + my ($message, $user, $ipaddr)=@_; + return undef # success +} #}}} + +sub rcs_add ($) { #{{{ +} #}}} + +sub rcs_remove ($) { #{{{ +} #}}} + +sub rcs_rename ($$) { #{{{ +} #}}} + +sub rcs_recentchanges ($) { #{{{ +} #}}} + +sub rcs_diff ($) { #{{{ +} #}}} + +sub rcs_getctime ($) { #{{{ + error gettext("getctime not implemented"); +} #}}} + +1 diff --git a/IkiWiki/Plugin/openid.pm b/IkiWiki/Plugin/openid.pm index 10a8fa22f..de7f7280e 100644 --- a/IkiWiki/Plugin/openid.pm +++ b/IkiWiki/Plugin/openid.pm @@ -8,6 +8,7 @@ use IkiWiki 2.00; sub import { #{{{ hook(type => "getopt", id => "openid", call => \&getopt); + hook(type => "getsetup", id => "openid", call => \&getsetup); hook(type => "auth", id => "openid", call => \&auth); hook(type => "formbuilder_setup", id => "openid", call => \&formbuilder_setup, last => 1); @@ -20,6 +21,17 @@ sub getopt () { #{{{ GetOptions("openidsignup=s" => \$config{openidsignup}); } #}}} +sub getsetup () { #{{{ + return + openidsignup => { + type => "string", + example => "http://myopenid.com/", + description => "an url where users can signup for an OpenID", + safe => 1, + rebuild => 0, + }, +} #}}} + sub formbuilder_setup (@) { #{{{ my %params=@_; diff --git a/IkiWiki/Plugin/passwordauth.pm b/IkiWiki/Plugin/passwordauth.pm index f3f1aa4bf..82afeef98 100644 --- a/IkiWiki/Plugin/passwordauth.pm +++ b/IkiWiki/Plugin/passwordauth.pm @@ -7,13 +7,30 @@ use strict; use IkiWiki 2.00; sub import { #{{{ - hook(type => "formbuilder_setup", id => "passwordauth", - call => \&formbuilder_setup); - hook(type => "formbuilder", id => "passwordauth", - call => \&formbuilder); + hook(type => "getsetup", id => "passwordauth", "call" => \&getsetup); + hook(type => "formbuilder_setup", id => "passwordauth", call => \&formbuilder_setup); + hook(type => "formbuilder", id => "passwordauth", call => \&formbuilder); hook(type => "sessioncgi", id => "passwordauth", call => \&sessioncgi); } # }}} +sub getsetup () { #{{{ + return + account_creation_password => { + type => "string", + example => "s3cr1t", + 
description => "a password that must be entered when signing up for an account", + safe => 1, + rebuild => 0, + }, + password_cost => { + type => "integer", + example => 8, + description => "cost of generating a password using Authen::Passphrase::BlowfishCrypt", + safe => 1, + rebuild => 0, + }, +} #}}} + # Checks if a string matches a user's password, and returns true or false. sub checkpassword ($$;$) { #{{{ my $user=shift; @@ -88,7 +105,9 @@ sub formbuilder_setup (@) { #{{{ if ($form->submitted eq "Register" || $form->submitted eq "Create Account") { $form->field(name => "confirm_password", type => "password"); - $form->field(name => "account_creation_password", type => "password") if (length $config{account_creation_password}); + $form->field(name => "account_creation_password", type => "password") + if (defined $config{account_creation_password} && + length $config{account_creation_password}); $form->field(name => "email", size => 50); $form->title("register"); $form->text(""); @@ -125,7 +144,8 @@ sub formbuilder_setup (@) { #{{{ shift eq $config{account_creation_password}; }, required => 1, - ) if (length $config{account_creation_password}); + ) if (defined $config{account_creation_password} && + length $config{account_creation_password}); $form->field( name => "email", validate => "EMAIL", @@ -259,7 +279,9 @@ sub formbuilder (@) { #{{{ error($@) if $@; sendmail( To => IkiWiki::userinfo_get($user_name, "email"), - From => "$config{wikiname} admin <$config{adminemail}>", + From => "$config{wikiname} admin <". + (defined $config{adminemail} ? $config{adminemail} : "") + .">", Subject => "$config{wikiname} information", Message => $template->output, ) or error(gettext("Failed to send mail")); diff --git a/IkiWiki/Plugin/pinger.pm b/IkiWiki/Plugin/pinger.pm index 614d42885..e72833b8f 100644 --- a/IkiWiki/Plugin/pinger.pm +++ b/IkiWiki/Plugin/pinger.pm @@ -9,12 +9,24 @@ my %pages; my $pinged=0; sub import { #{{{ + hook(type => "getsetup", id => "pinger", call => \&getsetup); hook(type => "needsbuild", id => "pinger", call => \&needsbuild); hook(type => "preprocess", id => "ping", call => \&preprocess); hook(type => "delete", id => "pinger", call => \&ping); hook(type => "change", id => "pinger", call => \&ping); } # }}} +sub getsetup () { #{{{ + return + pinger_timeout => { + type => "integer", + example => 15, + description => "how many seconds to try pinging before timing out", + safe => 1, + rebuild => 0, + }, +} #}}} + sub needsbuild (@) { #{{{ my $needsbuild=shift; foreach my $page (keys %pagestate) { diff --git a/IkiWiki/Plugin/prettydate.pm b/IkiWiki/Plugin/prettydate.pm index 745e6a1de..db5a94f41 100644 --- a/IkiWiki/Plugin/prettydate.pm +++ b/IkiWiki/Plugin/prettydate.pm @@ -40,9 +40,27 @@ sub default_timetable { } sub import { #{{{ + hook(type => "getsetup", id => "prettydate", call => \&getsetup); hook(type => "checkconfig", id => "prettydate", call => \&checkconfig); } # }}} +sub getsetup () { #{{{ + return + prettydateformat => { + type => "string", + example => '%X, %B %o, %Y', + description => "format to use to display date", + safe => 1, + rebuild => 1, + }, + timetable => { + type => "internal", + description => "array of time descriptions", + safe => 1, + rebuild => 1, + }, +} #}}} + sub checkconfig () { #{{{ if (! 
defined $config{prettydateformat} || $config{prettydateformat} eq '%c') { diff --git a/IkiWiki/Plugin/recentchanges.pm b/IkiWiki/Plugin/recentchanges.pm index 8383fb72a..d534d0cd9 100644 --- a/IkiWiki/Plugin/recentchanges.pm +++ b/IkiWiki/Plugin/recentchanges.pm @@ -6,6 +6,7 @@ use strict; use IkiWiki 2.00; sub import { #{{{ + hook(type => "getsetup", id => "recentchanges", call => \&getsetup); hook(type => "checkconfig", id => "recentchanges", call => \&checkconfig); hook(type => "refresh", id => "recentchanges", call => \&refresh); hook(type => "pagetemplate", id => "recentchanges", call => \&pagetemplate); @@ -13,6 +14,24 @@ sub import { #{{{ hook(type => "cgi", id => "recentchanges", call => \&cgi); } #}}} +sub getsetup () { #{{{ + return + recentchangespage => { + type => "string", + example => "recentchanges", + description => "name of the recentchanges page", + safe => 1, + rebuild => 1, + }, + recentchangesnum => { + type => "integer", + example => 100, + description => "number of changes to track", + safe => 1, + rebuild => 0, + }, +} #}}} + sub checkconfig () { #{{{ $config{recentchangespage}='recentchanges' unless defined $config{recentchangespage}; $config{recentchangesnum}=100 unless defined $config{recentchangesnum}; diff --git a/IkiWiki/Plugin/search.pm b/IkiWiki/Plugin/search.pm index eedfa6924..cb12d9500 100644 --- a/IkiWiki/Plugin/search.pm +++ b/IkiWiki/Plugin/search.pm @@ -7,6 +7,7 @@ use strict; use IkiWiki 2.00; sub import { #{{{ + hook(type => "getsetup", id => "search", call => \&getsetup); hook(type => "checkconfig", id => "search", call => \&checkconfig); hook(type => "pagetemplate", id => "search", call => \&pagetemplate); hook(type => "postscan", id => "search", call => \&index); @@ -14,6 +15,17 @@ sub import { #{{{ hook(type => "cgi", id => "search", call => \&cgi); } # }}} +sub getsetup () { #{{{ + return + omega_cgi => { + type => "string", + example => "/usr/lib/cgi-bin/omega/omega", + description => "path to the omega cgi program", + safe => 0, # external program + rebuild => 0, + }, +} #}}} + sub checkconfig () { #{{{ foreach my $required (qw(url cgiurl)) { if (! length $config{$required}) { @@ -21,7 +33,7 @@ sub checkconfig () { #{{{ } } - if (! exists $config{omega_cgi}) { + if (! defined $config{omega_cgi}) { $config{omega_cgi}="/usr/lib/cgi-bin/omega/omega"; } } #}}} diff --git a/IkiWiki/Plugin/shortcut.pm b/IkiWiki/Plugin/shortcut.pm index 8df60cfe2..dfc3cd7c7 100644 --- a/IkiWiki/Plugin/shortcut.pm +++ b/IkiWiki/Plugin/shortcut.pm @@ -6,11 +6,11 @@ use strict; use IkiWiki 2.00; sub import { #{{{ - hook(type => "checkconfig", id => "shortcut", call => \&checkconfig); + hook(type => "refresh", id => "shortcut", call => \&refresh); hook(type => "preprocess", id => "shortcut", call => \&preprocess_shortcut); } #}}} -sub checkconfig () { #{{{ +sub refresh () { #{{{ # Preprocess the shortcuts page to get all the available shortcuts # defined before other pages are rendered. 
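	# (Aside on the exists -> defined change in the search.pm hunk above:
	# presumably, once options are declared via getsetup, a key such as
	# omega_cgi can be present in %config with an undef value, so the
	# default has to be applied on definedness rather than mere presence.
	# A minimal sketch, with an invented value:
	#
	#	my %config = (omega_cgi => undef);
	#	print "exists\n"  if exists  $config{omega_cgi};   # prints
	#	print "defined\n" if defined $config{omega_cgi};   # does not print
	#	$config{omega_cgi} = "/usr/lib/cgi-bin/omega/omega"
	#		unless defined $config{omega_cgi};         # default applied
	# )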
my $srcfile=srcfile("shortcuts.mdwn", 1); diff --git a/IkiWiki/Plugin/skeleton.pm.example b/IkiWiki/Plugin/skeleton.pm.example index 1af8e4e9d..49c4d88f2 100644 --- a/IkiWiki/Plugin/skeleton.pm.example +++ b/IkiWiki/Plugin/skeleton.pm.example @@ -10,6 +10,7 @@ use IkiWiki 2.00; sub import { #{{{ hook(type => "getopt", id => "skeleton", call => \&getopt); + hook(type => "getsetup", id => "skeleton", call => \&getsetup); hook(type => "checkconfig", id => "skeleton", call => \&checkconfig); hook(type => "needsbuild", id => "skeleton", call => \&needsbuild); hook(type => "preprocess", id => "skeleton", call => \&preprocess); @@ -38,6 +39,17 @@ sub getopt () { #{{{ debug("skeleton plugin getopt"); } #}}} +sub getsetup () { #{{{ + return + skeleton => { + type => "boolean", + example => 0, + description => "example option", + safe => 0, + rebuild => 0, + }, +} #}}} + sub checkconfig () { #{{{ debug("skeleton plugin checkconfig"); } #}}} diff --git a/IkiWiki/Plugin/svn.pm b/IkiWiki/Plugin/svn.pm new file mode 100644 index 000000000..05312a1ed --- /dev/null +++ b/IkiWiki/Plugin/svn.pm @@ -0,0 +1,380 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::svn; + +use warnings; +use strict; +use IkiWiki; +use POSIX qw(setlocale LC_CTYPE); + +sub import { #{{{ + hook(type => "checkconfig", id => "svn", call => \&checkconfig); + hook(type => "getsetup", id => "svn", call => \&getsetup); + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); + hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); + hook(type => "rcs", id => "rcs_add", call => \&rcs_add); + hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); + hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); + hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); + hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); + hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); +} #}}} + +sub checkconfig () { #{{{ + if (! defined $config{diffurl}) { + $config{diffurl}=""; + } + if (! defined $config{svnpath}) { + $config{svnpath}="trunk"; + } + if (exists $config{svnpath}) { + # code depends on the path not having extraneous slashes + $config{svnpath}=~tr#/#/#s; + $config{svnpath}=~s/\/$//; + $config{svnpath}=~s/^\///; + } + if (defined $config{svn_wrapper} && length $config{svn_wrapper}) { + push @{$config{wrappers}}, { + wrapper => $config{svn_wrapper}, + wrappermode => (defined $config{svn_wrappermode} ? 
$config{svn_wrappermode} : "04755"), + }; + } +} #}}} + +sub getsetup () { #{{{ + return + svnrepo => { + type => "string", + example => "/svn/wiki", + description => "subversion repository location", + safe => 0, # path + rebuild => 0, + }, + svnpath => { + type => "string", + example => "trunk", + description => "path inside repository where the wiki is located", + safe => 0, # paranoia + rebuild => 0, + }, + svn_wrapper => { + type => "string", + example => "/svn/wikirepo/hooks/post-commit", + description => "svn post-commit executable to generate", + safe => 0, # file + rebuild => 0, + }, + svn_wrappermode => { + type => "string", + example => '04755', + description => "mode for svn_wrapper (can safely be made suid)", + safe => 0, + rebuild => 0, + }, + historyurl => { + type => "string", + example => "http://svn.example.org/trunk/[[file]]", + description => "viewvc url to show file history ([[file]] substituted)", + safe => 1, + rebuild => 1, + }, + diffurl => { + type => "string", + example => "http://svn.example.org/trunk/[[file]]?root=wiki&r1=[[r1]]&r2=[[r2]]", + description => "viewvc url to show a diff ([[file]], [[r1]], and [[r2]] substituted)", + safe => 1, + rebuild => 1, + }, +} #}}} + +# svn needs LC_CTYPE set to a UTF-8 locale, so try to find one. Any will do. +sub find_lc_ctype() { + my $current = setlocale(LC_CTYPE()); + return $current if $current =~ m/UTF-?8$/i; + + # Make some obvious attempts to avoid calling `locale -a` + foreach my $locale ("$current.UTF-8", "en_US.UTF-8", "en_GB.UTF-8") { + return $locale if setlocale(LC_CTYPE(), $locale); + } + + # Try to get all available locales and pick the first UTF-8 one found. + if (my @locale = grep(/UTF-?8$/i, `locale -a`)) { + chomp @locale; + return $locale[0] if setlocale(LC_CTYPE(), $locale[0]); + } + + # fallback to the current locale + return $current; +} # }}} +$ENV{LC_CTYPE} = $ENV{LC_CTYPE} || find_lc_ctype(); + +sub svn_info ($$) { #{{{ + my $field=shift; + my $file=shift; + + my $info=`LANG=C svn info $file`; + my ($ret)=$info=~/^$field: (.*)$/m; + return $ret; +} #}}} + +sub rcs_update () { #{{{ + if (-d "$config{srcdir}/.svn") { + if (system("svn", "update", "--quiet", $config{srcdir}) != 0) { + warn("svn update failed\n"); + } + } +} #}}} + +sub rcs_prepedit ($) { #{{{ + # Prepares to edit a file under revision control. Returns a token + # that must be passed into rcs_commit when the file is ready + # for committing. + # The file is relative to the srcdir. + my $file=shift; + + if (-d "$config{srcdir}/.svn") { + # For subversion, return the revision of the file when + # editing begins. + my $rev=svn_info("Revision", "$config{srcdir}/$file"); + return defined $rev ? $rev : ""; + } +} #}}} + +sub rcs_commit ($$$;$$) { #{{{ + # Tries to commit the page; returns undef on _success_ and + # a version of the page with the rcs's conflict markers on failure. + # The file is relative to the srcdir. + my $file=shift; + my $message=shift; + my $rcstoken=shift; + my $user=shift; + my $ipaddr=shift; + + if (defined $user) { + $message="web commit by $user".(length $message ? ": $message" : ""); + } + elsif (defined $ipaddr) { + $message="web commit from $ipaddr".(length $message ? ": $message" : ""); + } + + if (-d "$config{srcdir}/.svn") { + # Check to see if the page has been changed by someone + # else since rcs_prepedit was called. 
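		# (How the token gets here, as a hedged sketch with invented
		# values rather than ikiwiki's actual CGI code: editing starts
		# with
		#
		#	my $token = rcs_prepedit("index.mdwn");  # svn revision, e.g. "1234"
		#
		# the token travels with the edit form, and saving calls
		#
		#	my $conflict = rcs_commit("index.mdwn", "fix typo",
		#		$token, "joe", undef);
		#
		# where a defined return value is the page text with svn's
		# conflict markers after a failed merge.)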
+ my ($oldrev)=$rcstoken=~/^([0-9]+)$/; # untaint + my $rev=svn_info("Revision", "$config{srcdir}/$file"); + if (defined $rev && defined $oldrev && $rev != $oldrev) { + # Merge their changes into the file that we've + # changed. + if (system("svn", "merge", "--quiet", "-r$oldrev:$rev", + "$config{srcdir}/$file", "$config{srcdir}/$file") != 0) { + warn("svn merge -r$oldrev:$rev failed\n"); + } + } + + if (system("svn", "commit", "--quiet", + "--encoding", "UTF-8", "-m", + IkiWiki::possibly_foolish_untaint($message), + $config{srcdir}) != 0) { + my $conflict=readfile("$config{srcdir}/$file"); + if (system("svn", "revert", "--quiet", "$config{srcdir}/$file") != 0) { + warn("svn revert failed\n"); + } + return $conflict; + } + } + return undef # success +} #}}} + +sub rcs_commit_staged ($$$) { + # Commits all staged changes. Changes can be staged using rcs_add, + # rcs_remove, and rcs_rename. + my ($message, $user, $ipaddr)=@_; + + if (defined $user) { + $message="web commit by $user".(length $message ? ": $message" : ""); + } + elsif (defined $ipaddr) { + $message="web commit from $ipaddr".(length $message ? ": $message" : ""); + } + + if (system("svn", "commit", "--quiet", + "--encoding", "UTF-8", "-m", + IkiWiki::possibly_foolish_untaint($message), + $config{srcdir}) != 0) { + warn("svn commit failed\n"); + return 1; # failure + } + return undef # success +} + +sub rcs_add ($) { #{{{ + # filename is relative to the root of the srcdir + my $file=shift; + + if (-d "$config{srcdir}/.svn") { + my $parent=IkiWiki::dirname($file); + while (! -d "$config{srcdir}/$parent/.svn") { + $file=$parent; + $parent=IkiWiki::dirname($file); + } + + if (system("svn", "add", "--quiet", "$config{srcdir}/$file") != 0) { + warn("svn add failed\n"); + } + } +} #}}} + +sub rcs_remove ($) { #{{{ + # filename is relative to the root of the srcdir + my $file=shift; + + if (-d "$config{srcdir}/.svn") { + if (system("svn", "rm", "--force", "--quiet", "$config{srcdir}/$file") != 0) { + warn("svn rm failed\n"); + } + } +} #}}} + +sub rcs_rename ($$) { #{{{ + # filenames relative to the root of the srcdir + my ($src, $dest)=@_; + + if (-d "$config{srcdir}/.svn") { + # Add parent directory for $dest + my $parent=dirname($dest); + if (! -d "$config{srcdir}/$parent/.svn") { + while (! 
-d "$config{srcdir}/$parent/.svn") { + $parent=dirname($dest); + } + if (system("svn", "add", "--quiet", "$config{srcdir}/$parent") != 0) { + warn("svn add $parent failed\n"); + } + } + + if (system("svn", "mv", "--force", "--quiet", + "$config{srcdir}/$src", "$config{srcdir}/$dest") != 0) { + warn("svn rename failed\n"); + } + } +} #}}} + +sub rcs_recentchanges ($) { #{{{ + my $num=shift; + my @ret; + + return unless -d "$config{srcdir}/.svn"; + + eval q{ + use Date::Parse; + use XML::SAX; + use XML::Simple; + }; + error($@) if $@; + + # avoid using XML::SAX::PurePerl, it's buggy with UTF-8 data + my @parsers = map { ${$_}{Name} } @{XML::SAX->parsers()}; + do { + $XML::Simple::PREFERRED_PARSER = pop @parsers; + } until $XML::Simple::PREFERRED_PARSER ne 'XML::SAX::PurePerl'; + + # --limit is only supported on Subversion 1.2.0+ + my $svn_version=`svn --version -q`; + my $svn_limit=''; + $svn_limit="--limit $num" + if $svn_version =~ /\d\.(\d)\.\d/ && $1 >= 2; + + my $svn_url=svn_info("URL", $config{srcdir}); + my $xml = XMLin(scalar `svn $svn_limit --xml -v log '$svn_url'`, + ForceArray => [ 'logentry', 'path' ], + GroupTags => { paths => 'path' }, + KeyAttr => { path => 'content' }, + ); + foreach my $logentry (@{$xml->{logentry}}) { + my (@pages, @message); + + my $rev = $logentry->{revision}; + my $user = $logentry->{author}; + + my $when=str2time($logentry->{date}, 'UTC'); + + foreach my $msgline (split(/\n/, $logentry->{msg})) { + push @message, { line => $msgline }; + } + + my $committype="web"; + if (defined $message[0] && + $message[0]->{line}=~/$config{web_commit_regexp}/) { + $user=defined $2 ? "$2" : "$3"; + $message[0]->{line}=$4; + } + else { + $committype="svn"; + } + + foreach my $file (keys %{$logentry->{paths}}) { + if (length $config{svnpath}) { + next unless $file=~/^\/\Q$config{svnpath}\E\/([^ ]+)(?:$|\s)/; + $file=$1; + } + + my $diffurl=$config{diffurl}; + $diffurl=~s/\[\[file\]\]/$file/g; + $diffurl=~s/\[\[r1\]\]/$rev - 1/eg; + $diffurl=~s/\[\[r2\]\]/$rev/g; + + push @pages, { + page => pagename($file), + diffurl => $diffurl, + } if length $file; + } + push @ret, { + rev => $rev, + user => $user, + committype => $committype, + when => $when, + message => [@message], + pages => [@pages], + } if @pages; + return @ret if @ret >= $num; + } + + return @ret; +} #}}} + +sub rcs_diff ($) { #{{{ + my $rev=IkiWiki::possibly_foolish_untaint(int(shift)); + return `svnlook diff $config{svnrepo} -r$rev --no-diff-deleted`; +} #}}} + +sub rcs_getctime ($) { #{{{ + my $file=shift; + + my $svn_log_infoline=qr/^r\d+\s+\|\s+[^\s]+\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/; + + my $child = open(SVNLOG, "-|"); + if (! $child) { + exec("svn", "log", $file) || error("svn log $file failed to run"); + } + + my $date; + while () { + if (/$svn_log_infoline/) { + $date=$1; + } + } + close SVNLOG || warn "svn log $file exited $?"; + + if (! defined $date) { + warn "failed to parse svn log for $file\n"; + return 0; + } + + eval q{use Date::Parse}; + error($@) if $@; + $date=str2time($date); + debug("found ctime ".localtime($date)." 
for $file"); + return $date; +} #}}} + +1 diff --git a/IkiWiki/Plugin/tag.pm b/IkiWiki/Plugin/tag.pm index b0a0e53be..36b434f67 100644 --- a/IkiWiki/Plugin/tag.pm +++ b/IkiWiki/Plugin/tag.pm @@ -10,6 +10,7 @@ my %tags; sub import { #{{{ hook(type => "getopt", id => "tag", call => \&getopt); + hook(type => "getsetup", id => "tag", call => \&getsetup); hook(type => "preprocess", id => "tag", call => \&preprocess_tag, scan => 1); hook(type => "preprocess", id => "taglink", call => \&preprocess_taglink, scan => 1); hook(type => "pagetemplate", id => "tag", call => \&pagetemplate); @@ -22,6 +23,17 @@ sub getopt () { #{{{ GetOptions("tagbase=s" => \$config{tagbase}); } #}}} +sub getsetup () { #{{{ + return + tagbase => { + type => "string", + example => "tag", + description => "parent page tags are located under", + safe => 1, + rebuild => 1, + }, +} #}}} + sub tagpage ($) { #{{{ my $tag=shift; diff --git a/IkiWiki/Plugin/tla.pm b/IkiWiki/Plugin/tla.pm new file mode 100644 index 000000000..b95c1a522 --- /dev/null +++ b/IkiWiki/Plugin/tla.pm @@ -0,0 +1,285 @@ +#!/usr/bin/perl +package IkiWiki::Plugin::tla; + +use warnings; +use strict; +use IkiWiki; + +sub import { #{{{ + hook(type => "checkconfig", id => "tla", call => \&checkconfig); + hook(type => "getsetup", id => "tla", call => \&getsetup); + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + hook(type => "rcs", id => "rcs_commit", call => \&rcs_commit); + hook(type => "rcs", id => "rcs_commit_staged", call => \&rcs_commit_staged); + hook(type => "rcs", id => "rcs_add", call => \&rcs_add); + hook(type => "rcs", id => "rcs_remove", call => \&rcs_remove); + hook(type => "rcs", id => "rcs_rename", call => \&rcs_rename); + hook(type => "rcs", id => "rcs_recentchanges", call => \&rcs_recentchanges); + hook(type => "rcs", id => "rcs_diff", call => \&rcs_diff); + hook(type => "rcs", id => "rcs_getctime", call => \&rcs_getctime); +} #}}} + +sub checkconfig () { #{{{ + if (! defined $config{diffurl}) { + $config{diffurl}=""; + } + if (defined $config{tla_wrapper} && length $config{tla_wrapper}) { + push @{$config{wrappers}}, { + wrapper => $config{tla_wrapper}, + wrappermode => (defined $config{tla_wrappermode} ? $config{tla_wrappermode} : "06755"), + }; + } +} #}}} + +sub getsetup () { #{{{ + return + tla_wrapper => { + type => "string", + #example => "", # TODO example + description => "tla post-commit executable to generate", + safe => 0, # file + rebuild => 0, + }, + tla_wrappermode => { + type => "string", + example => '06755', + description => "mode for tla_wrapper (can safely be made suid)", + safe => 0, + rebuild => 0, + }, + historyurl => { + type => "string", + #example => "", # TODO example + description => "url to show file history ([[file]] substituted)", + safe => 1, + rebuild => 1, + }, + diffurl => { + type => "string", + #example => "", # TODO example + description => "url to show a diff ([[file]] and [[rev]] substituted)", + safe => 1, + rebuild => 1, + }, +} #}}} + +sub quiet_system (@) { #{{{ + # See Debian bug #385939. 
+ open (SAVEOUT, ">&STDOUT"); + close STDOUT; + open (STDOUT, ">/dev/null"); + my $ret=system(@_); + close STDOUT; + open (STDOUT, ">&SAVEOUT"); + close SAVEOUT; + return $ret; +} #}}} + +sub rcs_update () { #{{{ + if (-d "$config{srcdir}/{arch}") { + if (quiet_system("tla", "replay", "-d", $config{srcdir}) != 0) { + warn("tla replay failed\n"); + } + } +} #}}} + +sub rcs_prepedit ($) { #{{{ + my $file=shift; + + if (-d "$config{srcdir}/{arch}") { + # For Arch, return the tree-id of archive when + # editing begins. + my $rev=`tla tree-id $config{srcdir}`; + return defined $rev ? $rev : ""; + } +} #}}} + +sub rcs_commit ($$$;$$) { #{{{ + my $file=shift; + my $message=shift; + my $rcstoken=shift; + my $user=shift; + my $ipaddr=shift; + + if (defined $user) { + $message="web commit by $user".(length $message ? ": $message" : ""); + } + elsif (defined $ipaddr) { + $message="web commit from $ipaddr".(length $message ? ": $message" : ""); + } + + if (-d "$config{srcdir}/{arch}") { + # Check to see if the page has been changed by someone + # else since rcs_prepedit was called. + my ($oldrev)=$rcstoken=~/^([A-Za-z0-9@\/._-]+)$/; # untaint + my $rev=`tla tree-id $config{srcdir}`; + if (defined $rev && defined $oldrev && $rev ne $oldrev) { + # Merge their changes into the file that we've + # changed. + if (quiet_system("tla", "update", "-d", + "$config{srcdir}") != 0) { + warn("tla update failed\n"); + } + } + + if (quiet_system("tla", "commit", + "-L".IkiWiki::possibly_foolish_untaint($message), + '-d', $config{srcdir}) != 0) { + my $conflict=readfile("$config{srcdir}/$file"); + if (system("tla", "undo", "-n", "--quiet", "-d", "$config{srcdir}") != 0) { + warn("tla undo failed\n"); + } + return $conflict; + } + } + return undef # success +} #}}} + +sub rcs_commit_staged ($$$) { + # Commits all staged changes. Changes can be staged using rcs_add, + # rcs_remove, and rcs_rename. + my ($message, $user, $ipaddr)=@_; + + error("rcs_commit_staged not implemented for tla"); # TODO +} + +sub rcs_add ($) { #{{{ + my $file=shift; + + if (-d "$config{srcdir}/{arch}") { + if (quiet_system("tla", "add", "$config{srcdir}/$file") != 0) { + warn("tla add failed\n"); + } + } +} #}}} + +sub rcs_remove ($) { # {{{ + my $file = shift; + + error("rcs_remove not implemented for tla"); # TODO +} #}}} + +sub rcs_rename ($$) { # {{{a + my ($src, $dest) = @_; + + error("rcs_rename not implemented for tla"); # TODO +} #}}} + +sub rcs_recentchanges ($) { + my $num=shift; + my @ret; + + return unless -d "$config{srcdir}/{arch}"; + + eval q{use Date::Parse}; + error($@) if $@; + eval q{use Mail::Header}; + error($@) if $@; + + my $logs = `tla logs -d $config{srcdir}`; + my @changesets = reverse split(/\n/, $logs); + + for (my $i=0; $i<$num && $i<$#changesets; $i++) { + my ($change)=$changesets[$i]=~/^([A-Za-z0-9@\/._-]+)$/; # untaint + + open(LOG, "tla cat-log -d $config{srcdir} $change|"); + my $head = Mail::Header->new(\*LOG); + close(LOG); + + my $rev = $head->get("Revision"); + my $summ = $head->get("Summary"); + my $newfiles = $head->get("New-files"); + my $modfiles = $head->get("Modified-files"); + my $remfiles = $head->get("Removed-files"); + my $user = $head->get("Creator"); + + my @paths = grep { !/^(.*\/)?\.arch-ids\/.*\.id$/ } + split(/ /, "$newfiles $modfiles .arch-ids/fake.id"); + + my $sdate = $head->get("Standard-date"); + my $when = str2time($sdate, 'UTC'); + + my $committype = "web"; + if (defined $summ && $summ =~ /$config{web_commit_regexp}/) { + $user = defined $2 ? 
"$2" : "$3"; + $summ = $4; + } + else { + $committype="tla"; + } + + my @message; + push @message, { line => $summ }; + + my @pages; + + foreach my $file (@paths) { + my $diffurl=$config{diffurl}; + $diffurl=~s/\[\[file\]\]/$file/g; + $diffurl=~s/\[\[rev\]\]/$change/g; + push @pages, { + page => pagename($file), + diffurl => $diffurl, + } if length $file; + } + push @ret, { + rev => $change, + user => $user, + committype => $committype, + when => $when, + message => [@message], + pages => [@pages], + } if @pages; + + last if $i == $num; + } + + return @ret; +} + +sub rcs_diff ($) { #{{{ + my $rev=shift; + my $logs = `tla logs -d $config{srcdir}`; + my @changesets = reverse split(/\n/, $logs); + my $i; + + for($i=0;$i<$#changesets;$i++) { + last if $changesets[$i] eq $rev; + } + + my $revminusone = $changesets[$i+1]; + return `tla diff -d $config{srcdir} $revminusone`; +} #}}} + +sub rcs_getctime ($) { #{{{ + my $file=shift; + eval q{use Date::Parse}; + error($@) if $@; + eval q{use Mail::Header}; + error($@) if $@; + + my $logs = `tla logs -d $config{srcdir}`; + my @changesets = reverse split(/\n/, $logs); + my $sdate; + + for (my $i=0; $i<$#changesets; $i++) { + my $change = $changesets[$i]; + + open(LOG, "tla cat-log -d $config{srcdir} $change|"); + my $head = Mail::Header->new(\*LOG); + close(LOG); + + $sdate = $head->get("Standard-date"); + my $newfiles = $head->get("New-files"); + + my ($lastcreation) = grep {/^$file$/} split(/ /, "$newfiles"); + last if defined($lastcreation); + } + + my $date=str2time($sdate, 'UTC'); + debug("found ctime ".localtime($date)." for $file"); + return $date; +} #}}} + +1 diff --git a/IkiWiki/Plugin/typography.pm b/IkiWiki/Plugin/typography.pm index fe6996898..6229e6c33 100644 --- a/IkiWiki/Plugin/typography.pm +++ b/IkiWiki/Plugin/typography.pm @@ -8,6 +8,7 @@ use IkiWiki 2.00; sub import { #{{{ hook(type => "getopt", id => "typography", call => \&getopt); + hook(type => "getsetup", id => "typography", call => \&getsetup); IkiWiki::hook(type => "sanitize", id => "typography", call => \&sanitize); } # }}} @@ -18,11 +19,25 @@ sub getopt () { #{{{ GetOptions("typographyattributes=s" => \$config{typographyattributes}); } #}}} +sub getsetup () { #{{{ + eval q{use Text::Typography}; + error($@) if $@; + + return + typographyattributes => { + type => "string", + example => "3", + description => "Text::Typography attributes value", + safe => 1, + rebuild => 1, + }, +} #}}} + sub sanitize (@) { #{{{ my %params=@_; eval q{use Text::Typography}; - error($@) if $@; + return $params{content} if $@; my $attributes=defined $config{typographyattributes} ? $config{typographyattributes} : '3'; return Text::Typography::typography($params{content}, $attributes); diff --git a/IkiWiki/Rcs/Stub.pm b/IkiWiki/Rcs/Stub.pm deleted file mode 100644 index 04ba5f028..000000000 --- a/IkiWiki/Rcs/Stub.pm +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/perl -# Stubs for no revision control. - -package IkiWiki; - -use warnings; -use strict; -use IkiWiki; - -sub rcs_update () { - # Update working directory to current version. - # (May be more complex for distributed RCS.) -} - -sub rcs_prepedit ($) { - # Prepares to edit a file under revision control. Returns a token - # that must be passed into rcs_commit when the file is ready - # for committing. - # The file is relative to the srcdir. - return "" -} - -sub rcs_commit ($$$;$$) { - # Tries to commit the page; returns undef on _success_ and - # a version of the page with the rcs's conflict markers on failure. 
- # The file is relative to the srcdir. - my ($file, $message, $rcstoken, $user, $ipaddr) = @_; - return undef # success -} - -sub rcs_commit_staged ($$$) { - # Commits all staged changes. Changes can be staged using rcs_add, - # rcs_remove, and rcs_rename. - my ($message, $user, $ipaddr)=@_; - return undef # success -} - -sub rcs_add ($) { - # Add a file. The filename is relative to the root of the srcdir. - # Note that this should not check the new file in, it should only - # prepare for it to be checked in when rcs_commit is called. - # Note that the file may be in a new subdir that is not yet added - # to version control; the subdir can be added if so. -} - -sub rcs_remove ($) { - # Remove a file. The filename is relative to the root of the srcdir. - # Note that this should not check the removal in, it should only - # prepare for it to be checked in when rcs_commit is called. - # Note that the new file may be in a new subdir that is not yet added - # to version control; the subdir can be added if so. -} - -sub rcs_rename ($$) { - # Rename a file. The filenames are relative to the root of the srcdir. - # Note that this should not commit the rename, it should only - # prepare it for when rcs_commit is called. - # The new filename may be in a new subdir, that is not yet added to - # version control. If so, the subdir will exist already, and should - # be added to revision control. -} - -sub rcs_recentchanges ($) { - # Examine the RCS history and generate a list of recent changes. - # The data structure returned for each change is: - # { - # rev => # the RCSs id for this commit - # user => # name of user who made the change, - # committype => # either "web" or the name of the rcs, - # when => # time when the change was made, - # message => [ - # { line => "commit message line" }, - # { line => "commit message line" }, - # # etc, - # ], - # pages => [ - # { - # page => # name of page changed, - # diffurl => # optional url to a diff showing - # # the changes, - # }, - # # repeat for each page changed in this commit, - # ], - # } -} - -sub rcs_diff ($) { - # Optional, used to get diffs for recentchanges. - # The parameter is the rev from rcs_recentchanges. - # Should return a list of lines of the diff (including \n) in list - # context, and the whole diff in scalar context. -} - -sub rcs_getctime ($) { - # Optional, used to get the page creation time from the RCS. 
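	# (A concrete, invented instance of one rcs_recentchanges entry as
	# documented above:
	#
	#	{
	#		rev => "1234",
	#		user => "joe",
	#		committype => "web",
	#		when => 1217124000,
	#		message => [
	#			{ line => "web commit by joe: fix typo" },
	#		],
	#		pages => [
	#			{
	#				page => "index",
	#				diffurl => "http://example.com/?r1=1233&r2=1234",
	#			},
	#		],
	#	}
	# )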
- error gettext("getctime not implemented"); -} - -1 diff --git a/IkiWiki/Rcs/bzr.pm b/IkiWiki/Rcs/bzr.pm deleted file mode 100644 index c80356159..000000000 --- a/IkiWiki/Rcs/bzr.pm +++ /dev/null @@ -1,220 +0,0 @@ -#!/usr/bin/perl - -package IkiWiki; - -use warnings; -use strict; -use IkiWiki; -use Encode; -use open qw{:utf8 :std}; - -sub bzr_log ($) { #{{{ - my $out = shift; - my @infos = (); - my $key = undef; - - while (<$out>) { - my $line = $_; - my ($value); - if ($line =~ /^message:/) { - $key = "message"; - $infos[$#infos]{$key} = ""; - } - elsif ($line =~ /^(modified|added|renamed|renamed and modified|removed):/) { - $key = "files"; - unless (defined($infos[$#infos]{$key})) { $infos[$#infos]{$key} = ""; } - } - elsif (defined($key) and $line =~ /^ (.*)/) { - $infos[$#infos]{$key} .= "$1\n"; - } - elsif ($line eq "------------------------------------------------------------\n") { - $key = undef; - push (@infos, {}); - } - else { - chomp $line; - ($key, $value) = split /: +/, $line, 2; - $infos[$#infos]{$key} = $value; - } - } - close $out; - - return @infos; -} #}}} - -sub rcs_update () { #{{{ - my @cmdline = ("bzr", "update", "--quiet", $config{srcdir}); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } -} #}}} - -sub rcs_prepedit ($) { #{{{ - return ""; -} #}}} - -sub bzr_author ($$) { #{{{ - my ($user, $ipaddr) = @_; - - if (defined $user) { - return possibly_foolish_untaint($user); - } - elsif (defined $ipaddr) { - return "Anonymous from ".possibly_foolish_untaint($ipaddr); - } - else { - return "Anonymous"; - } -} #}}} - -sub rcs_commit ($$$;$$) { #{{{ - my ($file, $message, $rcstoken, $user, $ipaddr) = @_; - - $user = bzr_author($user, $ipaddr); - - $message = possibly_foolish_untaint($message); - if (! length $message) { - $message = "no message given"; - } - - my @cmdline = ("bzr", "commit", "--quiet", "-m", $message, "--author", $user, - $config{srcdir}."/".$file); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } - - return undef; # success -} #}}} - -sub rcs_commit_staged ($$$) { - # Commits all staged changes. Changes can be staged using rcs_add, - # rcs_remove, and rcs_rename. - my ($message, $user, $ipaddr)=@_; - - $user = bzr_author($user, $ipaddr); - - $message = possibly_foolish_untaint($message); - if (! 
length $message) { - $message = "no message given"; - } - - my @cmdline = ("bzr", "commit", "--quiet", "-m", $message, "--author", $user, - $config{srcdir}); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } - - return undef; # success -} #}}} - -sub rcs_add ($) { # {{{ - my ($file) = @_; - - my @cmdline = ("bzr", "add", "--quiet", "$config{srcdir}/$file"); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } -} #}}} - -sub rcs_remove ($) { # {{{ - my ($file) = @_; - - my @cmdline = ("bzr", "rm", "--force", "--quiet", "$config{srcdir}/$file"); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } -} #}}} - -sub rcs_rename ($$) { # {{{ - my ($src, $dest) = @_; - - my $parent = dirname($dest); - if (system("bzr", "add", "--quiet", "$config{srcdir}/$parent") != 0) { - warn("bzr add $parent failed\n"); - } - - my @cmdline = ("bzr", "mv", "--quiet", "$config{srcdir}/$src", "$config{srcdir}/$dest"); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } -} #}}} - -sub rcs_recentchanges ($) { #{{{ - my ($num) = @_; - - my @cmdline = ("bzr", "log", "-v", "--show-ids", "--limit", $num, - $config{srcdir}); - open (my $out, "@cmdline |"); - - eval q{use Date::Parse}; - error($@) if $@; - - my @ret; - foreach my $info (bzr_log($out)) { - my @pages = (); - my @message = (); - - foreach my $msgline (split(/\n/, $info->{message})) { - push @message, { line => $msgline }; - } - - foreach my $file (split(/\n/, $info->{files})) { - my ($filename, $fileid) = ($file =~ /^(.*?) +([^ ]+)$/); - - # Skip directories - next if ($filename =~ /\/$/); - - # Skip source name in renames - $filename =~ s/^.* => //; - - my $diffurl = $config{'diffurl'}; - $diffurl =~ s/\[\[file\]\]/$filename/go; - $diffurl =~ s/\[\[file-id\]\]/$fileid/go; - $diffurl =~ s/\[\[r2\]\]/$info->{revno}/go; - - push @pages, { - page => pagename($filename), - diffurl => $diffurl, - }; - } - - my $user = $info->{"committer"}; - if (defined($info->{"author"})) { $user = $info->{"author"}; } - $user =~ s/\s*<.*>\s*$//; - $user =~ s/^\s*//; - - push @ret, { - rev => $info->{"revno"}, - user => $user, - committype => "bzr", - when => time - str2time($info->{"timestamp"}), - message => [@message], - pages => [@pages], - }; - } - - return @ret; -} #}}} - -sub rcs_getctime ($) { #{{{ - my ($file) = @_; - - # XXX filename passes through the shell here, should try to avoid - # that just in case - my @cmdline = ("bzr", "log", "--limit", '1', "$config{srcdir}/$file"); - open (my $out, "@cmdline |"); - - my @log = bzr_log($out); - - if (length @log < 1) { - return 0; - } - - eval q{use Date::Parse}; - error($@) if $@; - - my $ctime = str2time($log[0]->{"timestamp"}); - return $ctime; -} #}}} - -1 diff --git a/IkiWiki/Rcs/git.pm b/IkiWiki/Rcs/git.pm deleted file mode 100644 index ecf560d0b..000000000 --- a/IkiWiki/Rcs/git.pm +++ /dev/null @@ -1,474 +0,0 @@ -#!/usr/bin/perl - -package IkiWiki; - -use warnings; -use strict; -use IkiWiki; -use Encode; -use open qw{:utf8 :std}; - -my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate Git sha1sums -my $dummy_commit_msg = 'dummy commit'; # message to skip in recent changes - -sub _safe_git (&@) { #{{{ - # Start a child process safely without resorting /bin/sh. - # Return command output or success state (in scalar context). - - my ($error_handler, @cmdline) = @_; - - my $pid = open my $OUT, "-|"; - - error("Cannot fork: $!") if !defined $pid; - - if (!$pid) { - # In child. - # Git commands want to be in wc. 
- chdir $config{srcdir} - or error("Cannot chdir to $config{srcdir}: $!"); - exec @cmdline or error("Cannot exec '@cmdline': $!"); - } - # In parent. - - my @lines; - while (<$OUT>) { - chomp; - push @lines, $_; - } - - close $OUT; - - $error_handler->("'@cmdline' failed: $!") if $? && $error_handler; - - return wantarray ? @lines : ($? == 0); -} -# Convenient wrappers. -sub run_or_die ($@) { _safe_git(\&error, @_) } -sub run_or_cry ($@) { _safe_git(sub { warn @_ }, @_) } -sub run_or_non ($@) { _safe_git(undef, @_) } -#}}} - -sub _merge_past ($$$) { #{{{ - # Unlike with Subversion, Git cannot make a 'svn merge -rN:M file'. - # Git merge commands work with the committed changes, except in the - # implicit case of '-m' of git checkout(1). So we should invent a - # kludge here. In principle, we need to create a throw-away branch - # in preparing for the merge itself. Since branches are cheap (and - # branching is fast), this shouldn't cost high. - # - # The main problem is the presence of _uncommitted_ local changes. One - # possible approach to get rid of this situation could be that we first - # make a temporary commit in the master branch and later restore the - # initial state (this is possible since Git has the ability to undo a - # commit, i.e. 'git reset --soft HEAD^'). The method can be summarized - # as follows: - # - # - create a diff of HEAD:current-sha1 - # - dummy commit - # - create a dummy branch and switch to it - # - rewind to past (reset --hard to the current-sha1) - # - apply the diff and commit - # - switch to master and do the merge with the dummy branch - # - make a soft reset (undo the last commit of master) - # - # The above method has some drawbacks: (1) it needs a redundant commit - # just to get rid of local changes, (2) somewhat slow because of the - # required system forks. Until someone points a more straight method - # (which I would be grateful) I have implemented an alternative method. - # In this approach, we hide all the modified files from Git by renaming - # them (using the 'rename' builtin) and later restore those files in - # the throw-away branch (that is, we put the files themselves instead - # of applying a patch). - - my ($sha1, $file, $message) = @_; - - my @undo; # undo stack for cleanup in case of an error - my $conflict; # file content with conflict markers - - eval { - # Hide local changes from Git by renaming the modified file. - # Relative paths must be converted to absolute for renaming. - my ($target, $hidden) = ( - "$config{srcdir}/${file}", "$config{srcdir}/${file}.${sha1}" - ); - rename($target, $hidden) - or error("rename '$target' to '$hidden' failed: $!"); - # Ensure to restore the renamed file on error. - push @undo, sub { - return if ! -e "$hidden"; # already renamed - rename($hidden, $target) - or warn "rename '$hidden' to '$target' failed: $!"; - }; - - my $branch = "throw_away_${sha1}"; # supposed to be unique - - # Create a throw-away branch and rewind backward. - push @undo, sub { run_or_cry('git', 'branch', '-D', $branch) }; - run_or_die('git', 'branch', $branch, $sha1); - - # Switch to throw-away branch for the merge operation. - push @undo, sub { - if (!run_or_cry('git', 'checkout', $config{gitmaster_branch})) { - run_or_cry('git', 'checkout','-f',$config{gitmaster_branch}); - } - }; - run_or_die('git', 'checkout', $branch); - - # Put the modified file in _this_ branch. - rename($hidden, $target) - or error("rename '$hidden' to '$target' failed: $!"); - - # _Silently_ commit all modifications in the current branch. 
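	# (run_or_non is used here because 'git commit' exits non-zero when
	# there is nothing to commit, which is not an error for this step;
	# see the same note in rcs_commit_staged below.)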
- run_or_non('git', 'commit', '-m', $message, '-a'); - # ... and re-switch to master. - run_or_die('git', 'checkout', $config{gitmaster_branch}); - - # Attempt to merge without complaining. - if (!run_or_non('git', 'pull', '--no-commit', '.', $branch)) { - $conflict = readfile($target); - run_or_die('git', 'reset', '--hard'); - } - }; - my $failure = $@; - - # Process undo stack (in reverse order). By policy cleanup - # actions should normally print a warning on failure. - while (my $handle = pop @undo) { - $handle->(); - } - - error("Git merge failed!\n$failure\n") if $failure; - - return $conflict; -} #}}} - -sub _parse_diff_tree ($@) { #{{{ - # Parse the raw diff tree chunk and return the info hash. - # See git-diff-tree(1) for the syntax. - - my ($prefix, $dt_ref) = @_; - - # End of stream? - return if !defined @{ $dt_ref } || - !defined @{ $dt_ref }[0] || !length @{ $dt_ref }[0]; - - my %ci; - # Header line. - while (my $line = shift @{ $dt_ref }) { - return if $line !~ m/^(.+) ($sha1_pattern)/; - - my $sha1 = $2; - $ci{'sha1'} = $sha1; - last; - } - - # Identification lines for the commit. - while (my $line = shift @{ $dt_ref }) { - # Regexps are semi-stolen from gitweb.cgi. - if ($line =~ m/^tree ([0-9a-fA-F]{40})$/) { - $ci{'tree'} = $1; - } - elsif ($line =~ m/^parent ([0-9a-fA-F]{40})$/) { - # XXX: collecting in reverse order - push @{ $ci{'parents'} }, $1; - } - elsif ($line =~ m/^(author|committer) (.*) ([0-9]+) (.*)$/) { - my ($who, $name, $epoch, $tz) = - ($1, $2, $3, $4 ); - - $ci{ $who } = $name; - $ci{ "${who}_epoch" } = $epoch; - $ci{ "${who}_tz" } = $tz; - - if ($name =~ m/^[^<]+\s+<([^@>]+)/) { - $ci{"${who}_username"} = $1; - } - elsif ($name =~ m/^([^<]+)\s+<>$/) { - $ci{"${who}_username"} = $1; - } - else { - $ci{"${who}_username"} = $name; - } - } - elsif ($line =~ m/^$/) { - # Trailing empty line signals next section. - last; - } - } - - debug("No 'tree' seen in diff-tree output") if !defined $ci{'tree'}; - - if (defined $ci{'parents'}) { - $ci{'parent'} = @{ $ci{'parents'} }[0]; - } - else { - $ci{'parent'} = 0 x 40; - } - - # Commit message (optional). - while ($dt_ref->[0] =~ /^ /) { - my $line = shift @{ $dt_ref }; - $line =~ s/^ //; - push @{ $ci{'comment'} }, $line; - } - shift @{ $dt_ref } if $dt_ref->[0] =~ /^$/; - - # Modified files. - while (my $line = shift @{ $dt_ref }) { - if ($line =~ m{^ - (:+) # number of parents - ([^\t]+)\t # modes, sha1, status - (.*) # file names - $}xo) { - my $num_parents = length $1; - my @tmp = split(" ", $2); - my ($file, $file_to) = split("\t", $3); - my @mode_from = splice(@tmp, 0, $num_parents); - my $mode_to = shift(@tmp); - my @sha1_from = splice(@tmp, 0, $num_parents); - my $sha1_to = shift(@tmp); - my $status = shift(@tmp); - - if ($file =~ m/^"(.*)"$/) { - ($file=$1) =~ s/\\([0-7]{1,3})/chr(oct($1))/eg; - } - $file =~ s/^\Q$prefix\E//; - if (length $file) { - push @{ $ci{'details'} }, { - 'file' => decode_utf8($file), - 'sha1_from' => $sha1_from[0], - 'sha1_to' => $sha1_to, - }; - } - next; - }; - last; - } - - return \%ci; -} #}}} - -sub git_commit_info ($;$) { #{{{ - # Return an array of commit info hashes of num commits (default: 1) - # starting from the given sha1sum. 
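	# (Rough usage sketch with invented values; the fields are the ones
	# _parse_diff_tree fills in:
	#
	#	my ($ci) = git_commit_info('HEAD', 1);
	#	print $ci->{sha1};              # 40-character commit id
	#	print $ci->{author_username};   # parsed from the author line
	#	print scalar localtime($ci->{author_epoch});
	#	print $_->{file} foreach @{ $ci->{details} };  # changed files
	# )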
- - my ($sha1, $num) = @_; - - $num ||= 1; - - my @raw_lines = run_or_die('git', 'log', "--max-count=$num", - '--pretty=raw', '--raw', '--abbrev=40', '--always', '-c', - '-r', $sha1, '--', '.'); - my ($prefix) = run_or_die('git', 'rev-parse', '--show-prefix'); - - my @ci; - while (my $parsed = _parse_diff_tree(($prefix or ""), \@raw_lines)) { - push @ci, $parsed; - } - - warn "Cannot parse commit info for '$sha1' commit" if !@ci; - - return wantarray ? @ci : $ci[0]; -} #}}} - -sub git_sha1 (;$) { #{{{ - # Return head sha1sum (of given file). - - my $file = shift || q{--}; - - # Ignore error since a non-existing file might be given. - my ($sha1) = run_or_non('git', 'rev-list', '--max-count=1', 'HEAD', - '--', $file); - if ($sha1) { - ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now - } else { debug("Empty sha1sum for '$file'.") } - return defined $sha1 ? $sha1 : q{}; -} #}}} - -sub rcs_update () { #{{{ - # Update working directory. - - if (length $config{gitorigin_branch}) { - run_or_cry('git', 'pull', $config{gitorigin_branch}); - } -} #}}} - -sub rcs_prepedit ($) { #{{{ - # Return the commit sha1sum of the file when editing begins. - # This will be later used in rcs_commit if a merge is required. - - my ($file) = @_; - - return git_sha1($file); -} #}}} - -sub rcs_commit ($$$;$$) { #{{{ - # Try to commit the page; returns undef on _success_ and - # a version of the page with the rcs's conflict markers on - # failure. - - my ($file, $message, $rcstoken, $user, $ipaddr) = @_; - - # Check to see if the page has been changed by someone else since - # rcs_prepedit was called. - my $cur = git_sha1($file); - my ($prev) = $rcstoken =~ /^($sha1_pattern)$/; # untaint - - if (defined $cur && defined $prev && $cur ne $prev) { - my $conflict = _merge_past($prev, $file, $dummy_commit_msg); - return $conflict if defined $conflict; - } - - rcs_add($file); - return rcs_commit_staged($message, $user, $ipaddr); -} #}}} - -sub rcs_commit_staged ($$$) { - # Commits all staged changes. Changes can be staged using rcs_add, - # rcs_remove, and rcs_rename. - my ($message, $user, $ipaddr)=@_; - - # Set the commit author and email to the web committer. - my %env=%ENV; - if (defined $user || defined $ipaddr) { - my $u=defined $user ? $user : $ipaddr; - $ENV{GIT_AUTHOR_NAME}=$u; - $ENV{GIT_AUTHOR_EMAIL}="$u\@web"; - } - - # git commit returns non-zero if file has not been really changed. - # so we should ignore its exit status (hence run_or_non). - $message = possibly_foolish_untaint($message); - if (run_or_non('git', 'commit', '--cleanup=verbatim', - '-q', '-m', $message)) { - if (length $config{gitorigin_branch}) { - run_or_cry('git', 'push', $config{gitorigin_branch}); - } - } - - %ENV=%env; - return undef; # success -} - -sub rcs_add ($) { # {{{ - # Add file to archive. - - my ($file) = @_; - - run_or_cry('git', 'add', $file); -} #}}} - -sub rcs_remove ($) { # {{{ - # Remove file from archive. - - my ($file) = @_; - - run_or_cry('git', 'rm', '-f', $file); -} #}}} - -sub rcs_rename ($$) { # {{{ - my ($src, $dest) = @_; - - run_or_cry('git', 'mv', '-f', $src, $dest); -} #}}} - -sub rcs_recentchanges ($) { #{{{ - # List of recent changes. - - my ($num) = @_; - - eval q{use Date::Parse}; - error($@) if $@; - - my @rets; - foreach my $ci (git_commit_info('HEAD', $num)) { - # Skip redundant commits. 
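		# ("redundant" here means the internal merge commits that
		# _merge_past creates with $dummy_commit_msg as their message;
		# they are noise in recentchanges and are filtered out.)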
- next if ($ci->{'comment'} && @{$ci->{'comment'}}[0] eq $dummy_commit_msg); - - my ($sha1, $when) = ( - $ci->{'sha1'}, - $ci->{'author_epoch'} - ); - - my @pages; - foreach my $detail (@{ $ci->{'details'} }) { - my $file = $detail->{'file'}; - - my $diffurl = $config{'diffurl'}; - $diffurl =~ s/\[\[file\]\]/$file/go; - $diffurl =~ s/\[\[sha1_parent\]\]/$ci->{'parent'}/go; - $diffurl =~ s/\[\[sha1_from\]\]/$detail->{'sha1_from'}/go; - $diffurl =~ s/\[\[sha1_to\]\]/$detail->{'sha1_to'}/go; - - push @pages, { - page => pagename($file), - diffurl => $diffurl, - }; - } - - my @messages; - my $pastblank=0; - foreach my $line (@{$ci->{'comment'}}) { - $pastblank=1 if $line eq ''; - next if $pastblank && $line=~m/^ *(signed[ \-]off[ \-]by[ :]|acked[ \-]by[ :]|cc[ :])/i; - push @messages, { line => $line }; - } - - my $user=$ci->{'author_username'}; - my $web_commit = ($ci->{'author'} =~ /\@web>/); - - # compatability code for old web commit messages - if (! $web_commit && - defined $messages[0] && - $messages[0]->{line} =~ m/$config{web_commit_regexp}/) { - $user = defined $2 ? "$2" : "$3"; - $messages[0]->{line} = $4; - $web_commit=1; - } - - push @rets, { - rev => $sha1, - user => $user, - committype => $web_commit ? "web" : "git", - when => $when, - message => [@messages], - pages => [@pages], - } if @pages; - - last if @rets >= $num; - } - - return @rets; -} #}}} - -sub rcs_diff ($) { #{{{ - my $rev=shift; - my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint - my @lines; - foreach my $line (run_or_non("git", "show", $sha1)) { - if (@lines || $line=~/^diff --git/) { - push @lines, $line."\n"; - } - } - if (wantarray) { - return @lines; - } - else { - return join("", @lines); - } -} #}}} - -sub rcs_getctime ($) { #{{{ - my $file=shift; - # Remove srcdir prefix - $file =~ s/^\Q$config{srcdir}\E\/?//; - - my $sha1 = git_sha1($file); - my $ci = git_commit_info($sha1); - my $ctime = $ci->{'author_epoch'}; - debug("ctime for '$file': ". 
localtime($ctime)); - - return $ctime; -} #}}} - -1 diff --git a/IkiWiki/Rcs/mercurial.pm b/IkiWiki/Rcs/mercurial.pm deleted file mode 100644 index 8c3f03e07..000000000 --- a/IkiWiki/Rcs/mercurial.pm +++ /dev/null @@ -1,197 +0,0 @@ -#!/usr/bin/perl - -package IkiWiki; - -use warnings; -use strict; -use IkiWiki; -use Encode; -use open qw{:utf8 :std}; - -sub mercurial_log($) { - my $out = shift; - my @infos; - - while (<$out>) { - my $line = $_; - my ($key, $value); - - if (/^description:/) { - $key = "description"; - $value = ""; - - # slurp everything as the description text - # until the next changeset - while (<$out>) { - if (/^changeset: /) { - $line = $_; - last; - } - - $value .= $_; - } - - local $/ = ""; - chomp $value; - $infos[$#infos]{$key} = $value; - } - - chomp $line; - ($key, $value) = split /: +/, $line, 2; - - if ($key eq "changeset") { - push @infos, {}; - - # remove the revision index, which is strictly - # local to the repository - $value =~ s/^\d+://; - } - - $infos[$#infos]{$key} = $value; - } - close $out; - - return @infos; -} - -sub rcs_update () { #{{{ - my @cmdline = ("hg", "-q", "-R", "$config{srcdir}", "update"); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } -} #}}} - -sub rcs_prepedit ($) { #{{{ - return ""; -} #}}} - -sub rcs_commit ($$$;$$) { #{{{ - my ($file, $message, $rcstoken, $user, $ipaddr) = @_; - - if (defined $user) { - $user = possibly_foolish_untaint($user); - } - elsif (defined $ipaddr) { - $user = "Anonymous from ".possibly_foolish_untaint($ipaddr); - } - else { - $user = "Anonymous"; - } - - $message = possibly_foolish_untaint($message); - if (! length $message) { - $message = "no message given"; - } - - my @cmdline = ("hg", "-q", "-R", $config{srcdir}, "commit", - "-m", $message, "-u", $user); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } - - return undef; # success -} #}}} - -sub rcs_commit_staged ($$$) { - # Commits all staged changes. Changes can be staged using rcs_add, - # rcs_remove, and rcs_rename. 
- my ($message, $user, $ipaddr)=@_; - - error("rcs_commit_staged not implemented for mercurial"); # TODO -} - -sub rcs_add ($) { # {{{ - my ($file) = @_; - - my @cmdline = ("hg", "-q", "-R", "$config{srcdir}", "add", "$config{srcdir}/$file"); - if (system(@cmdline) != 0) { - warn "'@cmdline' failed: $!"; - } -} #}}} - -sub rcs_remove ($) { # {{{ - my ($file) = @_; - - error("rcs_remove not implemented for mercurial"); # TODO -} #}}} - -sub rcs_rename ($$) { # {{{ - my ($src, $dest) = @_; - - error("rcs_rename not implemented for mercurial"); # TODO -} #}}} - -sub rcs_recentchanges ($) { #{{{ - my ($num) = @_; - - my @cmdline = ("hg", "-R", $config{srcdir}, "log", "-v", "-l", $num, - "--style", "default"); - open (my $out, "@cmdline |"); - - eval q{use Date::Parse}; - error($@) if $@; - - my @ret; - foreach my $info (mercurial_log($out)) { - my @pages = (); - my @message = (); - - foreach my $msgline (split(/\n/, $info->{description})) { - push @message, { line => $msgline }; - } - - foreach my $file (split / /,$info->{files}) { - my $diffurl = $config{'diffurl'}; - $diffurl =~ s/\[\[file\]\]/$file/go; - $diffurl =~ s/\[\[r2\]\]/$info->{changeset}/go; - - push @pages, { - page => pagename($file), - diffurl => $diffurl, - }; - } - - my $user = $info->{"user"}; - $user =~ s/\s*<.*>\s*$//; - $user =~ s/^\s*//; - - push @ret, { - rev => $info->{"changeset"}, - user => $user, - committype => "mercurial", - when => str2time($info->{"date"}), - message => [@message], - pages => [@pages], - }; - } - - return @ret; -} #}}} - -sub rcs_diff ($) { #{{{ - # TODO -} #}}} - -sub rcs_getctime ($) { #{{{ - my ($file) = @_; - - # XXX filename passes through the shell here, should try to avoid - # that just in case - my @cmdline = ("hg", "-R", $config{srcdir}, "log", "-v", "-l", '1', - "--style", "default", "$config{srcdir}/$file"); - open (my $out, "@cmdline |"); - - my @log = mercurial_log($out); - - if (length @log < 1) { - return 0; - } - - eval q{use Date::Parse}; - error($@) if $@; - - my $ctime = str2time($log[0]->{"date"}); - return $ctime; -} #}}} - -1 diff --git a/IkiWiki/Rcs/monotone.pm b/IkiWiki/Rcs/monotone.pm deleted file mode 100644 index 500af5c58..000000000 --- a/IkiWiki/Rcs/monotone.pm +++ /dev/null @@ -1,612 +0,0 @@ -#!/usr/bin/perl - -package IkiWiki; - -use warnings; -use strict; -use IkiWiki; -use Monotone; -use Date::Parse qw(str2time); -use Date::Format qw(time2str); - -my $sha1_pattern = qr/[0-9a-fA-F]{40}/; # pattern to validate sha1sums - -sub check_config() { #{{{ - if (!defined($config{mtnrootdir})) { - $config{mtnrootdir} = $config{srcdir}; - } - if (! -d "$config{mtnrootdir}/_MTN") { - error("Ikiwiki srcdir does not seem to be a Monotone workspace (or set the mtnrootdir)!"); - } - - chdir $config{srcdir} - or error("Cannot chdir to $config{srcdir}: $!"); - - my $child = open(MTN, "-|"); - if (! $child) { - open STDERR, ">/dev/null"; - exec("mtn", "version") || error("mtn version failed to run"); - } - - my $version=undef; - while () { - if (/^monotone (\d+\.\d+) /) { - $version=$1; - } - } - - close MTN || debug("mtn version exited $?"); - - if (!defined($version)) { - error("Cannot determine monotone version"); - } - if ($version < 0.38) { - error("Monotone version too old, is $version but required 0.38"); - } -} #}}} - -sub get_rev () { #{{{ - my $sha1 = `mtn --root=$config{mtnrootdir} automate get_base_revision_id`; - - ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now - if (! 
$sha1) { - debug("Unable to get base revision for '$config{srcdir}'.") - } - - return $sha1; -} #}}} - -sub get_rev_auto ($) { #{{{ - my $automator=shift; - - my @results = $automator->call("get_base_revision_id"); - - my $sha1 = $results[0]; - ($sha1) = $sha1 =~ m/($sha1_pattern)/; # sha1 is untainted now - if (! $sha1) { - debug("Unable to get base revision for '$config{srcdir}'.") - } - - return $sha1; -} #}}} - -sub mtn_merge ($$$$) { #{{{ - my $leftRev=shift; - my $rightRev=shift; - my $branch=shift; - my $author=shift; - - my $mergeRev; - - my $child = open(MTNMERGE, "-|"); - if (! $child) { - open STDERR, ">&STDOUT"; - exec("mtn", "--root=$config{mtnrootdir}", - "explicit_merge", $leftRev, $rightRev, - $branch, "--author", $author, "--key", - $config{mtnkey}) || error("mtn merge failed to run"); - } - - while () { - if (/^mtn.\s.merged.\s($sha1_pattern)$/) { - $mergeRev=$1; - } - } - - close MTNMERGE || return undef; - - debug("merged $leftRev, $rightRev to make $mergeRev"); - - return $mergeRev; -} #}}} - -sub commit_file_to_new_rev($$$$$$$$) { #{{{ - my $automator=shift; - my $wsfilename=shift; - my $oldFileID=shift; - my $newFileContents=shift; - my $oldrev=shift; - my $branch=shift; - my $author=shift; - my $message=shift; - - #store the file - my ($out, $err) = $automator->call("put_file", $oldFileID, $newFileContents); - my ($newFileID) = ($out =~ m/^($sha1_pattern)$/); - error("Failed to store file data for $wsfilename in repository") - if (! defined $newFileID || length $newFileID != 40); - - # get the mtn filename rather than the workspace filename - ($out, $err) = $automator->call("get_corresponding_path", $oldrev, $wsfilename, $oldrev); - my ($filename) = ($out =~ m/^file "(.*)"$/); - error("Couldn't find monotone repository path for file $wsfilename") if (! $filename); - debug("Converted ws filename of $wsfilename to repos filename of $filename"); - - # then stick in a new revision for this file - my $manifest = "format_version \"1\"\n\n". - "new_manifest [0000000000000000000000000000000000000000]\n\n". - "old_revision [$oldrev]\n\n". - "patch \"$filename\"\n". - " from [$oldFileID]\n". - " to [$newFileID]\n"; - ($out, $err) = $automator->call("put_revision", $manifest); - my ($newRevID) = ($out =~ m/^($sha1_pattern)$/); - error("Unable to make new monotone repository revision") - if (! defined $newRevID || length $newRevID != 40); - debug("put revision: $newRevID"); - - # now we need to add certs for this revision... 
- # author, branch, changelog, date - $automator->call("cert", $newRevID, "author", $author); - $automator->call("cert", $newRevID, "branch", $branch); - $automator->call("cert", $newRevID, "changelog", $message); - $automator->call("cert", $newRevID, "date", - time2str("%Y-%m-%dT%T", time, "UTC")); - - debug("Added certs for rev: $newRevID"); - return $newRevID; -} #}}} - -sub read_certs ($$) { #{{{ - my $automator=shift; - my $rev=shift; - my @results = $automator->call("certs", $rev); - my @ret; - - my $line = $results[0]; - while ($line =~ m/\s+key\s"(.*?)"\nsignature\s"(ok|bad|unknown)"\n\s+name\s"(.*?)"\n\s+value\s"(.*?)"\n\s+trust\s"(trusted|untrusted)"\n/sg) { - push @ret, { - key => $1, - signature => $2, - name => $3, - value => $4, - trust => $5, - }; - } - - return @ret; -} #}}} - -sub get_changed_files ($$) { #{{{ - my $automator=shift; - my $rev=shift; - - my @results = $automator->call("get_revision", $rev); - my $changes=$results[0]; - - my @ret; - my %seen = (); - - while ($changes =~ m/\s*(add_file|patch|delete|rename)\s"(.*?)(?new(); - $automator->open_args("--root", $config{mtnrootdir}, "--key", $config{mtnkey}); - - # Something has been committed, has this file changed? - my ($out, $err); - $automator->setOpts("r", $oldrev, "r", $rev); - ($out, $err) = $automator->call("content_diff", $file); - debug("Problem committing $file") if ($err ne ""); - my $diff = $out; - - if ($diff) { - # Commit a revision with just this file changed off - # the old revision. - # - # first get the contents - debug("File changed: forming branch"); - my $newfile=readfile("$config{srcdir}/$file"); - - # then get the old content ID from the diff - if ($diff !~ m/^---\s$file\s+($sha1_pattern)$/m) { - error("Unable to find previous file ID for $file"); - } - my $oldFileID = $1; - - # get the branch we're working in - ($out, $err) = $automator->call("get_option", "branch"); - chomp $out; - error("Illegal branch name in monotone workspace") if ($out !~ m/^([-\@\w\.]+)$/); - my $branch = $1; - - # then put the new content into the DB (and record the new content ID) - my $newRevID = commit_file_to_new_rev($automator, $file, $oldFileID, $newfile, $oldrev, $branch, $author, $message); - - $automator->close(); - - # if we made it to here then the file has been committed... revert the local copy - if (system("mtn", "--root=$config{mtnrootdir}", "revert", $file) != 0) { - debug("Unable to revert $file after merge on conflicted commit!"); - } - debug("Divergence created! Attempting auto-merge."); - - # see if it will merge cleanly - $ENV{MTN_MERGE}="fail"; - my $mergeResult = mtn_merge($newRevID, $rev, $branch, $author); - $ENV{MTN_MERGE}=""; - - # push any changes so far - if (defined($config{mtnsync}) && $config{mtnsync}) { - if (system("mtn", "--root=$config{mtnrootdir}", "push", "--quiet", "--ticker=none", "--key", $config{mtnkey}) != 0) { - debug("monotone push failed"); - } - } - - if (defined($mergeResult)) { - # everything is merged - bring outselves up to date - if (system("mtn", "--root=$config{mtnrootdir}", - "update", "-r", $mergeResult) != 0) { - debug("Unable to update to rev $mergeResult after merge on conflicted commit!"); - } - } - else { - debug("Auto-merge failed. 
Using diff-merge to add conflict markers."); - - $ENV{MTN_MERGE}="diffutils"; - $ENV{MTN_MERGE_DIFFUTILS}="partial=true"; - $mergeResult = mtn_merge($newRevID, $rev, $branch, $author); - $ENV{MTN_MERGE}=""; - $ENV{MTN_MERGE_DIFFUTILS}=""; - - if (!defined($mergeResult)) { - debug("Unable to insert conflict markers!"); - error("Your commit succeeded. Unfortunately, someone else committed something to the same ". - "part of the wiki at the same time. Both versions are stored in the monotone repository, ". - "but at present the different versions cannot be reconciled through the web interface. ". - "Please use the non-web interface to resolve the conflicts."); - } - - if (system("mtn", "--root=$config{mtnrootdir}", - "update", "-r", $mergeResult) != 0) { - debug("Unable to update to rev $mergeResult after conflict-enhanced merge on conflicted commit!"); - } - - # return "conflict enhanced" file to the user - # for cleanup note, this relies on the fact - # that ikiwiki seems to call rcs_prepedit() - # again after we return - return readfile("$config{srcdir}/$file"); - } - return undef; - } - $automator->close(); - } - - # If we reached here then the file we're looking at hasn't changed - # since $oldrev. Commit it. - - if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet", - "--author", $author, "--key", $config{mtnkey}, "-m", - possibly_foolish_untaint($message), $file) != 0) { - debug("Traditional commit failed! Returning data as conflict."); - my $conflict=readfile("$config{srcdir}/$file"); - if (system("mtn", "--root=$config{mtnrootdir}", "revert", - "--quiet", $file) != 0) { - debug("monotone revert failed"); - } - return $conflict; - } - if (defined($config{mtnsync}) && $config{mtnsync}) { - if (system("mtn", "--root=$config{mtnrootdir}", "push", - "--quiet", "--ticker=none", "--key", - $config{mtnkey}) != 0) { - debug("monotone push failed"); - } - } - - return undef # success -} #}}} - -sub rcs_commit_staged ($$$) { - # Commits all staged changes. Changes can be staged using rcs_add, - # rcs_remove, and rcs_rename. - my ($message, $user, $ipaddr)=@_; - - # Note - this will also commit any spurious changes that happen to be - # lying around in the working copy. There shouldn't be any, but... - - check_config(); - - my $author; - - if (defined $user) { - $author="Web user: " . $user; - } - elsif (defined $ipaddr) { - $author="Web IP: " . $ipaddr; - } - else { - $author="Web: Anonymous"; - } - - if (system("mtn", "--root=$config{mtnrootdir}", "commit", "--quiet", - "--author", $author, "--key", $config{mtnkey}, "-m", - possibly_foolish_untaint($message)) != 0) { - error("Monotone commit failed"); - } -} - -sub rcs_add ($) { #{{{ - my $file=shift; - - check_config(); - - if (system("mtn", "--root=$config{mtnrootdir}", "add", "--quiet", - $file) != 0) { - error("Monotone add failed"); - } -} #}}} - -sub rcs_remove ($) { # {{{ - my $file = shift; - - check_config(); - - # Note: it is difficult to undo a remove in Monotone at the moment. - # Until this is fixed, it might be better to make 'rm' move things - # into an attic, rather than actually remove them. - # To resurrect a file, you currently add a new file with the contents - # you want it to have. This loses all connectivity and automated - # merging with the 'pre-delete' versions of the file. 
- - if (system("mtn", "--root=$config{mtnrootdir}", "rm", "--quiet", - $file) != 0) { - error("Monotone remove failed"); - } -} #}}} - -sub rcs_rename ($$) { # {{{ - my ($src, $dest) = @_; - - check_config(); - - if (system("mtn", "--root=$config{mtnrootdir}", "rename", "--quiet", - $src, $dest) != 0) { - error("Monotone rename failed"); - } -} #}}} - -sub rcs_recentchanges ($) { #{{{ - my $num=shift; - my @ret; - - check_config(); - - # use log --brief to get a list of revs, as this - # gives the results in a nice order - # (otherwise we'd have to do our own date sorting) - - my @revs; - - my $child = open(MTNLOG, "-|"); - if (! $child) { - exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", - "--brief") || error("mtn log failed to run"); - } - - while (($num >= 0) and (my $line = )) { - if ($line =~ m/^($sha1_pattern)/) { - push @revs, $1; - $num -= 1; - } - } - close MTNLOG || debug("mtn log exited $?"); - - my $automator = Monotone->new(); - $automator->open(undef, $config{mtnrootdir}); - - while (@revs != 0) { - my $rev = shift @revs; - # first go through and figure out the messages, etc - - my $certs = [read_certs($automator, $rev)]; - - my $user; - my $when; - my $committype; - my (@pages, @message); - - foreach my $cert (@$certs) { - if ($cert->{signature} eq "ok" && - $cert->{trust} eq "trusted") { - if ($cert->{name} eq "author") { - $user = $cert->{value}; - # detect the source of the commit - # from the changelog - if ($cert->{key} eq $config{mtnkey}) { - $committype = "web"; - } else { - $committype = "monotone"; - } - } elsif ($cert->{name} eq "date") { - $when = str2time($cert->{value}, 'UTC'); - } elsif ($cert->{name} eq "changelog") { - my $messageText = $cert->{value}; - # split the changelog into multiple - # lines - foreach my $msgline (split(/\n/, $messageText)) { - push @message, { line => $msgline }; - } - } - } - } - - my @changed_files = get_changed_files($automator, $rev); - my $file; - - my ($out, $err) = $automator->call("parents", $rev); - my @parents = ($out =~ m/^($sha1_pattern)$/); - my $parent = $parents[0]; - - foreach $file (@changed_files) { - next unless length $file; - - if (defined $config{diffurl} and (@parents == 1)) { - my $diffurl=$config{diffurl}; - $diffurl=~s/\[\[r1\]\]/$parent/g; - $diffurl=~s/\[\[r2\]\]/$rev/g; - $diffurl=~s/\[\[file\]\]/$file/g; - push @pages, { - page => pagename($file), - diffurl => $diffurl, - }; - } - else { - push @pages, { - page => pagename($file), - } - } - } - - push @ret, { - rev => $rev, - user => $user, - committype => $committype, - when => $when, - message => [@message], - pages => [@pages], - } if @pages; - } - - $automator->close(); - - return @ret; -} #}}} - -sub rcs_diff ($) { #{{{ - my $rev=shift; - my ($sha1) = $rev =~ /^($sha1_pattern)$/; # untaint - - check_config(); - - my $child = open(MTNDIFF, "-|"); - if (! $child) { - exec("mtn", "diff", "--root=$config{mtnrootdir}", "-r", "p:".$sha1, "-r", $sha1) || error("mtn diff $sha1 failed to run"); - } - - my (@lines) = ; - - close MTNDIFF || debug("mtn diff $sha1 exited $?"); - - if (wantarray) { - return @lines; - } - else { - return join("", @lines); - } -} #}}} - -sub rcs_getctime ($) { #{{{ - my $file=shift; - - check_config(); - - my $child = open(MTNLOG, "-|"); - if (! 
$child) { - exec("mtn", "log", "--root=$config{mtnrootdir}", "--no-graph", - "--brief", $file) || error("mtn log $file failed to run"); - } - - my $firstRev; - while () { - if (/^($sha1_pattern)/) { - $firstRev=$1; - } - } - close MTNLOG || debug("mtn log $file exited $?"); - - if (! defined $firstRev) { - debug "failed to parse mtn log for $file"; - return 0; - } - - my $automator = Monotone->new(); - $automator->open(undef, $config{mtnrootdir}); - - my $certs = [read_certs($automator, $firstRev)]; - - $automator->close(); - - my $date; - - foreach my $cert (@$certs) { - if ($cert->{signature} eq "ok" && $cert->{trust} eq "trusted") { - if ($cert->{name} eq "date") { - $date = $cert->{value}; - } - } - } - - if (! defined $date) { - debug "failed to find date cert for revision $firstRev when looking for creation time of $file"; - return 0; - } - - $date=str2time($date, 'UTC'); - debug("found ctime ".localtime($date)." for $file"); - return $date; -} #}}} - -1 diff --git a/IkiWiki/Rcs/svn.pm b/IkiWiki/Rcs/svn.pm deleted file mode 100644 index 9081c3902..000000000 --- a/IkiWiki/Rcs/svn.pm +++ /dev/null @@ -1,311 +0,0 @@ -#!/usr/bin/perl - -package IkiWiki::Rcs::svn; - -use warnings; -use strict; -use IkiWiki; -use POSIX qw(setlocale LC_CTYPE); - -sub import { #{{{ - if (exists $IkiWiki::config{svnpath}) { - # code depends on the path not having extraneous slashes - $IkiWiki::config{svnpath}=~tr#/#/#s; - $IkiWiki::config{svnpath}=~s/\/$//; - $IkiWiki::config{svnpath}=~s/^\///; - } -} #}}} - - -package IkiWiki; - -# svn needs LC_CTYPE set to a UTF-8 locale, so try to find one. Any will do. -sub find_lc_ctype() { - my $current = setlocale(LC_CTYPE()); - return $current if $current =~ m/UTF-?8$/i; - - # Make some obvious attempts to avoid calling `locale -a` - foreach my $locale ("$current.UTF-8", "en_US.UTF-8", "en_GB.UTF-8") { - return $locale if setlocale(LC_CTYPE(), $locale); - } - - # Try to get all available locales and pick the first UTF-8 one found. - if (my @locale = grep(/UTF-?8$/i, `locale -a`)) { - chomp @locale; - return $locale[0] if setlocale(LC_CTYPE(), $locale[0]); - } - - # fallback to the current locale - return $current; -} # }}} -$ENV{LC_CTYPE} = $ENV{LC_CTYPE} || find_lc_ctype(); - -sub svn_info ($$) { #{{{ - my $field=shift; - my $file=shift; - - my $info=`LANG=C svn info $file`; - my ($ret)=$info=~/^$field: (.*)$/m; - return $ret; -} #}}} - -sub rcs_update () { #{{{ - if (-d "$config{srcdir}/.svn") { - if (system("svn", "update", "--quiet", $config{srcdir}) != 0) { - warn("svn update failed\n"); - } - } -} #}}} - -sub rcs_prepedit ($) { #{{{ - # Prepares to edit a file under revision control. Returns a token - # that must be passed into rcs_commit when the file is ready - # for committing. - # The file is relative to the srcdir. - my $file=shift; - - if (-d "$config{srcdir}/.svn") { - # For subversion, return the revision of the file when - # editing begins. - my $rev=svn_info("Revision", "$config{srcdir}/$file"); - return defined $rev ? $rev : ""; - } -} #}}} - -sub rcs_commit ($$$;$$) { #{{{ - # Tries to commit the page; returns undef on _success_ and - # a version of the page with the rcs's conflict markers on failure. - # The file is relative to the srcdir. - my $file=shift; - my $message=shift; - my $rcstoken=shift; - my $user=shift; - my $ipaddr=shift; - - if (defined $user) { - $message="web commit by $user".(length $message ? ": $message" : ""); - } - elsif (defined $ipaddr) { - $message="web commit from $ipaddr".(length $message ? 
": $message" : ""); - } - - if (-d "$config{srcdir}/.svn") { - # Check to see if the page has been changed by someone - # else since rcs_prepedit was called. - my ($oldrev)=$rcstoken=~/^([0-9]+)$/; # untaint - my $rev=svn_info("Revision", "$config{srcdir}/$file"); - if (defined $rev && defined $oldrev && $rev != $oldrev) { - # Merge their changes into the file that we've - # changed. - if (system("svn", "merge", "--quiet", "-r$oldrev:$rev", - "$config{srcdir}/$file", "$config{srcdir}/$file") != 0) { - warn("svn merge -r$oldrev:$rev failed\n"); - } - } - - if (system("svn", "commit", "--quiet", - "--encoding", "UTF-8", "-m", - possibly_foolish_untaint($message), - $config{srcdir}) != 0) { - my $conflict=readfile("$config{srcdir}/$file"); - if (system("svn", "revert", "--quiet", "$config{srcdir}/$file") != 0) { - warn("svn revert failed\n"); - } - return $conflict; - } - } - return undef # success -} #}}} - -sub rcs_commit_staged ($$$) { - # Commits all staged changes. Changes can be staged using rcs_add, - # rcs_remove, and rcs_rename. - my ($message, $user, $ipaddr)=@_; - - if (defined $user) { - $message="web commit by $user".(length $message ? ": $message" : ""); - } - elsif (defined $ipaddr) { - $message="web commit from $ipaddr".(length $message ? ": $message" : ""); - } - - if (system("svn", "commit", "--quiet", - "--encoding", "UTF-8", "-m", - possibly_foolish_untaint($message), - $config{srcdir}) != 0) { - warn("svn commit failed\n"); - return 1; # failure - } - return undef # success -} - -sub rcs_add ($) { #{{{ - # filename is relative to the root of the srcdir - my $file=shift; - - if (-d "$config{srcdir}/.svn") { - my $parent=dirname($file); - while (! -d "$config{srcdir}/$parent/.svn") { - $file=$parent; - $parent=dirname($file); - } - - if (system("svn", "add", "--quiet", "$config{srcdir}/$file") != 0) { - warn("svn add failed\n"); - } - } -} #}}} - -sub rcs_remove ($) { #{{{ - # filename is relative to the root of the srcdir - my $file=shift; - - if (-d "$config{srcdir}/.svn") { - if (system("svn", "rm", "--force", "--quiet", "$config{srcdir}/$file") != 0) { - warn("svn rm failed\n"); - } - } -} #}}} - -sub rcs_rename ($$) { #{{{ - # filenames relative to the root of the srcdir - my ($src, $dest)=@_; - - if (-d "$config{srcdir}/.svn") { - # Add parent directory for $dest - my $parent=dirname($dest); - if (! -d "$config{srcdir}/$parent/.svn") { - while (! 
-d "$config{srcdir}/$parent/.svn") { - $parent=dirname($dest); - } - if (system("svn", "add", "--quiet", "$config{srcdir}/$parent") != 0) { - warn("svn add $parent failed\n"); - } - } - - if (system("svn", "mv", "--force", "--quiet", - "$config{srcdir}/$src", "$config{srcdir}/$dest") != 0) { - warn("svn rename failed\n"); - } - } -} #}}} - -sub rcs_recentchanges ($) { #{{{ - my $num=shift; - my @ret; - - return unless -d "$config{srcdir}/.svn"; - - eval q{ - use Date::Parse; - use XML::SAX; - use XML::Simple; - }; - error($@) if $@; - - # avoid using XML::SAX::PurePerl, it's buggy with UTF-8 data - my @parsers = map { ${$_}{Name} } @{XML::SAX->parsers()}; - do { - $XML::Simple::PREFERRED_PARSER = pop @parsers; - } until $XML::Simple::PREFERRED_PARSER ne 'XML::SAX::PurePerl'; - - # --limit is only supported on Subversion 1.2.0+ - my $svn_version=`svn --version -q`; - my $svn_limit=''; - $svn_limit="--limit $num" - if $svn_version =~ /\d\.(\d)\.\d/ && $1 >= 2; - - my $svn_url=svn_info("URL", $config{srcdir}); - my $xml = XMLin(scalar `svn $svn_limit --xml -v log '$svn_url'`, - ForceArray => [ 'logentry', 'path' ], - GroupTags => { paths => 'path' }, - KeyAttr => { path => 'content' }, - ); - foreach my $logentry (@{$xml->{logentry}}) { - my (@pages, @message); - - my $rev = $logentry->{revision}; - my $user = $logentry->{author}; - - my $when=str2time($logentry->{date}, 'UTC'); - - foreach my $msgline (split(/\n/, $logentry->{msg})) { - push @message, { line => $msgline }; - } - - my $committype="web"; - if (defined $message[0] && - $message[0]->{line}=~/$config{web_commit_regexp}/) { - $user=defined $2 ? "$2" : "$3"; - $message[0]->{line}=$4; - } - else { - $committype="svn"; - } - - foreach my $file (keys %{$logentry->{paths}}) { - if (length $config{svnpath}) { - next unless $file=~/^\/\Q$config{svnpath}\E\/([^ ]+)(?:$|\s)/; - $file=$1; - } - - my $diffurl=$config{diffurl}; - $diffurl=~s/\[\[file\]\]/$file/g; - $diffurl=~s/\[\[r1\]\]/$rev - 1/eg; - $diffurl=~s/\[\[r2\]\]/$rev/g; - - push @pages, { - page => pagename($file), - diffurl => $diffurl, - } if length $file; - } - push @ret, { - rev => $rev, - user => $user, - committype => $committype, - when => $when, - message => [@message], - pages => [@pages], - } if @pages; - return @ret if @ret >= $num; - } - - return @ret; -} #}}} - -sub rcs_diff ($) { #{{{ - my $rev=possibly_foolish_untaint(int(shift)); - return `svnlook diff $config{svnrepo} -r$rev --no-diff-deleted`; -} #}}} - -sub rcs_getctime ($) { #{{{ - my $file=shift; - - my $svn_log_infoline=qr/^r\d+\s+\|\s+[^\s]+\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/; - - my $child = open(SVNLOG, "-|"); - if (! $child) { - exec("svn", "log", $file) || error("svn log $file failed to run"); - } - - my $date; - while () { - if (/$svn_log_infoline/) { - $date=$1; - } - } - close SVNLOG || warn "svn log $file exited $?"; - - if (! defined $date) { - warn "failed to parse svn log for $file\n"; - return 0; - } - - eval q{use Date::Parse}; - error($@) if $@; - $date=str2time($date); - debug("found ctime ".localtime($date)." for $file"); - return $date; -} #}}} - -1 diff --git a/IkiWiki/Rcs/tla.pm b/IkiWiki/Rcs/tla.pm deleted file mode 100644 index 4232e1fe8..000000000 --- a/IkiWiki/Rcs/tla.pm +++ /dev/null @@ -1,227 +0,0 @@ -#!/usr/bin/perl - -package IkiWiki; - -use warnings; -use strict; -use IkiWiki; - -sub quiet_system (@) { - # See Debian bug #385939. 
- open (SAVEOUT, ">&STDOUT"); - close STDOUT; - open (STDOUT, ">/dev/null"); - my $ret=system(@_); - close STDOUT; - open (STDOUT, ">&SAVEOUT"); - close SAVEOUT; - return $ret; -} - -sub rcs_update () { #{{{ - if (-d "$config{srcdir}/{arch}") { - if (quiet_system("tla", "replay", "-d", $config{srcdir}) != 0) { - warn("tla replay failed\n"); - } - } -} #}}} - -sub rcs_prepedit ($) { #{{{ - my $file=shift; - - if (-d "$config{srcdir}/{arch}") { - # For Arch, return the tree-id of archive when - # editing begins. - my $rev=`tla tree-id $config{srcdir}`; - return defined $rev ? $rev : ""; - } -} #}}} - -sub rcs_commit ($$$;$$) { #{{{ - my $file=shift; - my $message=shift; - my $rcstoken=shift; - my $user=shift; - my $ipaddr=shift; - - if (defined $user) { - $message="web commit by $user".(length $message ? ": $message" : ""); - } - elsif (defined $ipaddr) { - $message="web commit from $ipaddr".(length $message ? ": $message" : ""); - } - - if (-d "$config{srcdir}/{arch}") { - # Check to see if the page has been changed by someone - # else since rcs_prepedit was called. - my ($oldrev)=$rcstoken=~/^([A-Za-z0-9@\/._-]+)$/; # untaint - my $rev=`tla tree-id $config{srcdir}`; - if (defined $rev && defined $oldrev && $rev ne $oldrev) { - # Merge their changes into the file that we've - # changed. - if (quiet_system("tla", "update", "-d", - "$config{srcdir}") != 0) { - warn("tla update failed\n"); - } - } - - if (quiet_system("tla", "commit", - "-L".possibly_foolish_untaint($message), - '-d', $config{srcdir}) != 0) { - my $conflict=readfile("$config{srcdir}/$file"); - if (system("tla", "undo", "-n", "--quiet", "-d", "$config{srcdir}") != 0) { - warn("tla undo failed\n"); - } - return $conflict; - } - } - return undef # success -} #}}} - -sub rcs_commit_staged ($$$) { - # Commits all staged changes. Changes can be staged using rcs_add, - # rcs_remove, and rcs_rename. - my ($message, $user, $ipaddr)=@_; - - error("rcs_commit_staged not implemented for tla"); # TODO -} - -sub rcs_add ($) { #{{{ - my $file=shift; - - if (-d "$config{srcdir}/{arch}") { - if (quiet_system("tla", "add", "$config{srcdir}/$file") != 0) { - warn("tla add failed\n"); - } - } -} #}}} - -sub rcs_remove ($) { # {{{ - my $file = shift; - - error("rcs_remove not implemented for tla"); # TODO -} #}}} - -sub rcs_rename ($$) { # {{{a - my ($src, $dest) = @_; - - error("rcs_rename not implemented for tla"); # TODO -} #}}} - -sub rcs_recentchanges ($) { - my $num=shift; - my @ret; - - return unless -d "$config{srcdir}/{arch}"; - - eval q{use Date::Parse}; - error($@) if $@; - eval q{use Mail::Header}; - error($@) if $@; - - my $logs = `tla logs -d $config{srcdir}`; - my @changesets = reverse split(/\n/, $logs); - - for (my $i=0; $i<$num && $i<$#changesets; $i++) { - my ($change)=$changesets[$i]=~/^([A-Za-z0-9@\/._-]+)$/; # untaint - - open(LOG, "tla cat-log -d $config{srcdir} $change|"); - my $head = Mail::Header->new(\*LOG); - close(LOG); - - my $rev = $head->get("Revision"); - my $summ = $head->get("Summary"); - my $newfiles = $head->get("New-files"); - my $modfiles = $head->get("Modified-files"); - my $remfiles = $head->get("Removed-files"); - my $user = $head->get("Creator"); - - my @paths = grep { !/^(.*\/)?\.arch-ids\/.*\.id$/ } - split(/ /, "$newfiles $modfiles .arch-ids/fake.id"); - - my $sdate = $head->get("Standard-date"); - my $when = str2time($sdate, 'UTC'); - - my $committype = "web"; - if (defined $summ && $summ =~ /$config{web_commit_regexp}/) { - $user = defined $2 ? 
"$2" : "$3"; - $summ = $4; - } - else { - $committype="tla"; - } - - my @message; - push @message, { line => $summ }; - - my @pages; - - foreach my $file (@paths) { - my $diffurl=$config{diffurl}; - $diffurl=~s/\[\[file\]\]/$file/g; - $diffurl=~s/\[\[rev\]\]/$change/g; - push @pages, { - page => pagename($file), - diffurl => $diffurl, - } if length $file; - } - push @ret, { - rev => $change, - user => $user, - committype => $committype, - when => $when, - message => [@message], - pages => [@pages], - } if @pages; - - last if $i == $num; - } - - return @ret; -} - -sub rcs_diff ($) { #{{{ - my $rev=shift; - my $logs = `tla logs -d $config{srcdir}`; - my @changesets = reverse split(/\n/, $logs); - my $i; - - for($i=0;$i<$#changesets;$i++) { - last if $changesets[$i] eq $rev; - } - - my $revminusone = $changesets[$i+1]; - return `tla diff -d $config{srcdir} $revminusone`; -} #}}} - -sub rcs_getctime ($) { #{{{ - my $file=shift; - eval q{use Date::Parse}; - error($@) if $@; - eval q{use Mail::Header}; - error($@) if $@; - - my $logs = `tla logs -d $config{srcdir}`; - my @changesets = reverse split(/\n/, $logs); - my $sdate; - - for (my $i=0; $i<$#changesets; $i++) { - my $change = $changesets[$i]; - - open(LOG, "tla cat-log -d $config{srcdir} $change|"); - my $head = Mail::Header->new(\*LOG); - close(LOG); - - $sdate = $head->get("Standard-date"); - my $newfiles = $head->get("New-files"); - - my ($lastcreation) = grep {/^$file$/} split(/ /, "$newfiles"); - last if defined($lastcreation); - } - - my $date=str2time($sdate, 'UTC'); - debug("found ctime ".localtime($date)." for $file"); - return $date; -} #}}} - -1 diff --git a/IkiWiki/Render.pm b/IkiWiki/Render.pm index 90058199c..cb92d1ade 100644 --- a/IkiWiki/Render.pm +++ b/IkiWiki/Render.pm @@ -68,7 +68,7 @@ sub genpage ($$) { #{{{ $actions++; } - if (length $config{historyurl}) { + if (defined $config{historyurl} && length $config{historyurl}) { my $u=$config{historyurl}; $u=~s/\[\[file\]\]/$pagesources{$page}/g; $template->param(historyurl => $u); diff --git a/IkiWiki/Setup.pm b/IkiWiki/Setup.pm index 3b7a11253..38b715202 100644 --- a/IkiWiki/Setup.pm +++ b/IkiWiki/Setup.pm @@ -1,20 +1,18 @@ #!/usr/bin/perl # Ikiwiki setup files are perl files that 'use IkiWiki::Setup::foo', # passing it some sort of configuration data. -# -# There can be multiple modules, with different configuration styles. -# The setup modules each convert the data into the hashes used by ikiwiki -# internally (if it's not already in that format), and store it in -# IkiWiki::Setup::$raw_setup, to pass it back to this module. package IkiWiki::Setup; use warnings; use strict; use IkiWiki; -use IkiWiki::Wrapper; use open qw{:utf8 :std}; +# There can be multiple modules, with different configuration styles. +# The setup modules each convert the data into the hashes used by ikiwiki +# internally (if it's not already in that format), and store it in +# IkiWiki::Setup::$raw_setup, to pass it back to this module. our $raw_setup; sub load ($) { # {{{ @@ -34,54 +32,33 @@ sub load ($) { # {{{ eval $code; error("$setup: ".$@) if $@; - my $ret=$raw_setup; + my %setup=%{$raw_setup}; $raw_setup=undef; - return %$ret; -} #}}} - -package IkiWiki; - -sub setup () { #{{{ - my %setup=IkiWiki::Setup::load($config{setup}); - - $setup{plugin}=$config{plugin}; + # Merge setup into existing config and untaint. 
if (exists $setup{add_plugins}) { - push @{$setup{plugin}}, @{$setup{add_plugins}}; - delete $setup{add_plugins}; + push @{$setup{add_plugins}}, @{$config{add_plugins}}; } if (exists $setup{exclude}) { push @{$config{wiki_file_prune_regexps}}, $setup{exclude}; } - - if (! $config{render} && (! $config{refresh} || $config{wrappers})) { - debug(gettext("generating wrappers..")); - my @wrappers=@{$setup{wrappers}}; - delete $setup{wrappers}; - my %startconfig=(%config); - foreach my $wrapper (@wrappers) { - %config=(%startconfig, rebuild => 0, verbose => 0, %setup, %{$wrapper}); - checkconfig(); - if (! $config{cgi} && ! $config{post_commit}) { - $config{post_commit}=1; - } - gen_wrapper(); - } - %config=(%startconfig); - } - foreach my $c (keys %setup) { - next if $c eq 'syslog'; if (defined $setup{$c}) { - if (! ref $setup{$c}) { - $config{$c}=possibly_foolish_untaint($setup{$c}); + if (! ref $setup{$c} || ref $setup{$c} eq 'Regexp') { + $config{$c}=IkiWiki::possibly_foolish_untaint($setup{$c}); } elsif (ref $setup{$c} eq 'ARRAY') { - $config{$c}=[map { possibly_foolish_untaint($_) } @{$setup{$c}}] + if ($c eq 'wrappers') { + # backwards compatability code + $config{$c}=$setup{$c}; + } + else { + $config{$c}=[map { IkiWiki::possibly_foolish_untaint($_) } @{$setup{$c}}] + } } elsif (ref $setup{$c} eq 'HASH') { foreach my $key (keys %{$setup{$c}}) { - $config{$c}{$key}=possibly_foolish_untaint($setup{$c}{$key}); + $config{$c}{$key}=IkiWiki::possibly_foolish_untaint($setup{$c}{$key}); } } } @@ -90,32 +67,24 @@ sub setup () { #{{{ } } - if (! $config{refresh}) { - $config{rebuild}=1; - } - - loadplugins(); - checkconfig(); - - require IkiWiki::Render; - - if ($config{render}) { - commandline_render(); - } - - if (! $config{refresh}) { - debug(gettext("rebuilding wiki..")); - } - else { - debug(gettext("refreshing wiki..")); + if (length $config{cgi_wrapper}) { + push @{$config{wrappers}}, { + cgi => 1, + wrapper => $config{cgi_wrapper}, + wrappermode => (defined $config{cgi_wrappermode} ? $config{cgi_wrappermode} : "06755"), + }; } +} #}}} - lockwiki(); - loadindex(); - refresh(); +sub dump ($) { #{{{ + my $file=IkiWiki::possibly_foolish_untaint(shift); + + require IkiWiki::Setup::Standard; + my @dump=IkiWiki::Setup::Standard::gendump("Setup file for ikiwiki."); - debug(gettext("done")); - saveindex(); -} #}}} + open (OUT, ">", $file) || die "$file: $!"; + print OUT "$_\n" foreach @dump; + close OUT; +} 1 diff --git a/IkiWiki/Setup/Standard.pm b/IkiWiki/Setup/Standard.pm index f67c3829b..99da7ed86 100644 --- a/IkiWiki/Setup/Standard.pm +++ b/IkiWiki/Setup/Standard.pm @@ -7,9 +7,122 @@ package IkiWiki::Setup::Standard; use warnings; use strict; +use IkiWiki; -sub import { +sub import { #{{{ $IkiWiki::Setup::raw_setup=$_[1]; -} +} #}}} + +sub dumpline ($$$$) { #{{{ + my $key=shift; + my $value=shift; + my $type=shift; + my $prefix=shift; + + eval q{use Data::Dumper}; + error($@) if $@; + local $Data::Dumper::Terse=1; + local $Data::Dumper::Indent=1; + local $Data::Dumper::Pad="\t"; + local $Data::Dumper::Sortkeys=1; + local $Data::Dumper::Quotekeys=0; + + my $dumpedvalue; + if ($type eq 'boolean' || $type eq 'integer') { + # avoid quotes + $dumpedvalue=$value; + } + elsif ($type eq 'string' && ref $value eq 'ARRAY' && @$value && + ! grep { /[^-A-Za-z0-9_]/ } @$value) { + # dump simple array as qw{} + $dumpedvalue="[qw{ ".join(" ", @$value)." 
}]"; + } + else { + $dumpedvalue=Dumper($value); + chomp $dumpedvalue; + if (length $prefix) { + # add to second and subsequent lines + my @lines=split(/\n/, $dumpedvalue); + $dumpedvalue=""; + for (my $x=0; $x <= $#lines; $x++) { + $lines[$x] =~ s/^\t//; + $dumpedvalue.="\t".($x ? $prefix : "").$lines[$x]."\n"; + } + } + $dumpedvalue=~s/^\t//; + chomp $dumpedvalue; + } + + return "\t$prefix$key => $dumpedvalue,"; +} #}}} + +sub dumpvalues ($@) { #{{{ + my $setup=shift; + my @ret; + while (@_) { + my $key=shift; + my %info=%{shift()}; + + next if $info{type} eq "internal"; + + push @ret, "\t# ".$info{description} if exists $info{description}; + + if (exists $setup->{$key} && defined $setup->{$key}) { + push @ret, dumpline($key, $setup->{$key}, $info{type}, ""); + delete $setup->{$key}; + } + elsif (exists $info{example}) { + push @ret, dumpline($key, $info{example}, $info{type}, "#"); + } + } + return @ret; +} #}}} + +sub gendump ($) { #{{{ + my $description=shift; + my %setup=(%config); + my @ret; + + # disable logging to syslog while dumping + $config{syslog}=0; + + push @ret, "\t# basic setup"; + push @ret, dumpvalues(\%setup, IkiWiki::getsetup()); + + # Load all plugins, so that all setup options are available. + # (But skip a few problematic external demo plugins.) + my @plugins=grep { ! /^(externaldemo|pythondemo|\Q$config{rcs}\E)$/ } + sort(IkiWiki::listplugins()); + unshift @plugins, $config{rcs} if $config{rcs}; # rcs plugin 1st + foreach my $plugin (@plugins) { + eval { IkiWiki::loadplugin($plugin) }; + if (exists $IkiWiki::hooks{checkconfig}{$plugin}{call}) { + my @s=eval { $IkiWiki::hooks{checkconfig}{$plugin}{call}->() }; + } + } + + foreach my $id (@plugins) { + if (exists $IkiWiki::hooks{getsetup}{$id}{call}) { + # use an array rather than a hash, to preserve order + my @s=eval { $IkiWiki::hooks{getsetup}{$id}{call}->() }; + next unless @s; + push @ret, "", "\t# $id plugin"; + push @ret, dumpvalues(\%setup, @s); + } + } + + unshift @ret, + "#!/usr/bin/perl", + "# $description", + "#", + "# Passing this to ikiwiki --setup will make ikiwiki generate", + "# wrappers and build the wiki.", + "#", + "# Remember to re-run ikiwiki --setup any time you edit this file.", + "use IkiWiki::Setup::Standard {"; + push @ret, "}"; + + return @ret; +} #}}} 1 diff --git a/Makefile.PL b/Makefile.PL index 8c7ac7019..f7c090ff2 100755 --- a/Makefile.PL +++ b/Makefile.PL @@ -30,7 +30,10 @@ ikiwiki.out: ikiwiki.in ./pm_filter $(PREFIX) $(VER) $(PROBABLE_INST_LIB) < ikiwiki.in > ikiwiki.out chmod +x ikiwiki.out -extra_build: ikiwiki.out +ikiwiki.setup: ikiwiki.out + HOME=/home/me $(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.out -libdir . -dumpsetup ikiwiki.setup + +extra_build: ikiwiki.out ikiwiki.setup $(PERL) -Iblib/lib $(extramodules) $(tflag) ikiwiki.out -libdir . -setup docwiki.setup -refresh ./mdwn2man ikiwiki 1 doc/usage.mdwn > ikiwiki.man ./mdwn2man ikiwiki-mass-rebuild 8 doc/ikiwiki-mass-rebuild.mdwn > ikiwiki-mass-rebuild.man @@ -41,8 +44,7 @@ extra_build: ikiwiki.out extra_clean: rm -rf html doc/.ikiwiki - rm -f *.man ikiwiki.out - rm -f plugins/*.pyc + rm -f *.man ikiwiki.out ikiwiki.setup plugins/*.pyc $(MAKE) -C po clean extra_install: @@ -62,7 +64,7 @@ extra_install: install -d $(DESTDIR)$(PREFIX)/lib/ikiwiki/plugins for file in `find plugins -maxdepth 1 -type f ! 
-wholename plugins/.\*`; do \ - install -m 755 $$file $(DESTDIR)$(PREFIX)/lib/ikiwiki/plugins; \ + cp -a $$file $(DESTDIR)$(PREFIX)/lib/ikiwiki/plugins; \ done; \ install -d $(DESTDIR)$(PREFIX)/share/man/man1 diff --git a/debian/changelog b/debian/changelog index 77a08c456..08be87fbe 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,15 @@ +ikiwiki (2.60) UNRELEASED; urgency=low + + * Add getsetup hook, all plugins that add fields to %config should use it. + * ikiwiki --dumpsetup can generate a nice setup file snapshotting ikiwiki's + current configuration. + * Large amounts of internal config data reorg. + * The way wrappers are defined in the setup file has changed. Old setup + files will continue to work, for now. + * Version control backends promoted to first-class plugins. + + -- Joey Hess Mon, 21 Jul 2008 11:35:46 -0400 + ikiwiki (2.55) UNRELEASED; urgency=low * remove: New plugin that adds the ability to remove pages via the web. diff --git a/debian/examples b/debian/examples index d94312ed0..a1e623192 100644 --- a/debian/examples +++ b/debian/examples @@ -1 +1 @@ -doc/ikiwiki.setup +ikiwiki.setup diff --git a/doc/bugs/Command-line_arguments_should_override_settings_in_the_setup_file.mdwn b/doc/bugs/Command-line_arguments_should_override_settings_in_the_setup_file.mdwn index f1ccf5896..22b1c2cb5 100644 --- a/doc/bugs/Command-line_arguments_should_override_settings_in_the_setup_file.mdwn +++ b/doc/bugs/Command-line_arguments_should_override_settings_in_the_setup_file.mdwn @@ -1,5 +1,5 @@ In setting up my wiki I followed the [[setup]] instruction which point -to an [[ikiwiki.setup]] file that contains "verbose => 0". +to an ikiwiki.setup file that contains "verbose => 0". I hadn't noticed that setting in there, but later when I changed my standard command of: diff --git a/doc/ikiwiki.setup b/doc/ikiwiki.setup deleted file mode 100644 index 31a60b9ca..000000000 --- a/doc/ikiwiki.setup +++ /dev/null @@ -1,206 +0,0 @@ -#!/usr/bin/perl -# Configuration file for ikiwiki. -# Passing this to ikiwiki --setup will make ikiwiki generate wrappers and -# build the wiki. -# -# Remember to re-run ikiwiki --setup any time you edit this file. - -use IkiWiki::Setup::Standard { - wikiname => "MyWiki", - #adminuser => ["yourname", ], - adminemail => 'me@example.org', - - # Be sure to customise these.. - srcdir => "/path/to/source", - destdir => "/var/www/wiki", - - url => "http://example.org/wiki", - cgiurl => "http://example.org/wiki/ikiwiki.cgi", - #templatedir => "/usr/share/ikiwiki/templates", - #underlaydir => "/usr/share/ikiwiki/basewiki", - - # Subversion stuff. - #rcs => "svn", - #historyurl => "http://svn.example.org/trunk/[[file]]", - #diffurl => "http://svn.example.org/trunk/[[file]]?root=wiki&r1=[[r1]]&r2=[[r2]]", - #svnrepo => "/svn/wiki", - #svnpath => "trunk", - - # Git stuff. - #rcs => "git", - #historyurl => "http://git.example.org/gitweb.cgi?p=wiki.git;a=history;f=[[file]]", - #diffurl => "http://git.example.org/gitweb.cgi?p=wiki.git;a=blobdiff;h=[[sha1_to]];hp=[[sha1_from]];hb=[[sha1_parent]];f=[[file]]", - #gitorigin_branch => "origin", - #gitmaster_branch => "master", - - # Tla stuff. - #rcs => "tla" - #historyurl => ??, - #diffurl => ??, - - # Mercurial stuff. - #rcs => "mercurial", - #historyurl => "http://localhost:8000/log/tip/[[file]]", # hg serve'd local repository - #diffurl => "http://localhost:8000/?fd=[[r2]];file=[[file]]", - - # Bazaar stuff. 
- #rcs => "bzr", - #historyurl => "", - #diffurl => "http://example.com/revision?start_revid=[[r2]]#[[file]]-s", # using loggerhead - - # Monotone stuff - #rcs => "monotone", - #mtnkey => "web\@machine.company.com", - #historyurl => "http://viewmtn.example.com/branch/head/filechanges/com.example.branch/[[file]]", - #diffurl => "http://viewmtn.example.com/revision/diff/[[r1]]/with/[[r2]]/[[file]]", - # Set if you want the wiki to sync on update and commit. - #mtnsync => 0, - # The path to your workspace (defaults to the srcdir itself) - # e.g. use if your srcdir is a subdirectory of the workspace. - #mtnrootdir => "path/to/root/of/workspace", - - wrappers => [ - #{ - # # The cgi wrapper. - # cgi => 1, - # wrapper => "/var/www/wiki/ikiwiki.cgi", - # wrappermode => "06755", - #}, - #{ - # # The svn post-commit wrapper. - # # Note that this will overwrite any existing - # # post-commit hook script, which may not be - # # what you want. - # wrapper => "/svn/wikirepo/hooks/post-commit", - # wrappermode => "04755", - # # Log to syslog since svn post-commit hooks - # # hide output and errors. - # syslog => 1, - #}, - #{ - # # The git post-update wrapper. - # # Note that this will overwrite any existing - # # post-update hook script, which may not be - # # what you want. - # wrapper => "/git/wiki.git/hooks/post-update", - # wrappermode => "06755", - #}, - #{ - # # The monotone netsync hook. - # wrapper => "path/to/root/of/workspace/_MTN/ikiwiki-netsync-hook", - # wrappermode => "06755", - #}, - ], - - # Default to generating rss feeds for pages with feeds? - #rss => 1, - # Default to generating atom feeds for pages with feeds? - #atom => 1, - # Allow generating feeds even if not generated by default? - #allowrss => 1, - #allowatom => 1, - # Urls to ping with XML-RPC when feeds are updated - #pingurl => [qw{http://rpc.technorati.com/rpc/ping}], - # Include discussion links on all pages? - discussion => 1, - # To exclude files matching a regexp from processing. This adds to - # the default exclude list. - #exclude => qr/\.wav$/, - # To change the extension used for generated html files. - #htmlext => 'htm', - # Time format (for strftime) - #timeformat => '%c', - # Locale to use. Must be a UTF-8 locale. - #locale => 'en_US.UTF-8', - # Only send cookies over SSL connections. - #sslcookie => 1, - # Logging settings: - #verbose => 1, - syslog => 0, - # To link to user pages in a subdirectory of the wiki. - #userdir => "users", - # To create output files named page.html rather than page/index.html. - #usedirs => 0, - # Simple spam prevention: require an account-creation password. - #account_creation_password => "example", - # Cost of generating a password using Authen::Passphrase::BlowfishCrypt - #password_cost => 8, - # Uncomment to force ikiwiki to run with a particular umask. - #umask => 022, - # Default settings for the recentchanges page. - #recentchangespage => "recentchanges", - #recentchangesnum => 100, - # Use new '!'-prefixed preprocessor directive syntax - #prefix_directives => 1, - # Attempt to make hardlinks to source files instead of copying them. - # Useful if the wiki contains large media files. - #hardlink => 1, - # Enable use of multimarkdown features in .mdwn files. - #multimarkdown => 1, - - # To add plugins, list them here. - #add_plugins => [qw{goodstuff search wikitext camelcase - # htmltidy fortune sidebar map rst anonok}], - # If you want to disable any of the default plugins, list them here. 
- #disable_plugins => [qw{inline htmlscrubber passwordauth openid}], - # To add a directory to the perl search path, use this. - #libdir => "/home/me/.ikiwiki/", - - # To override environment variable settings, you can list values here. - #ENV => { - # TZ => "America/New_York", - # PATH => "/home/me/bin:/usr/local/bin:/usr/bin:/bin", - #}, - - # For use with the tag plugin, make all tags be located under a - # base page. - #tagbase => "tag", - - # For use with the search plugin if the omega cgi is located - # somewhere else. - #omega_cgi => "/usr/lib/cgi-bin/omega/omega", - - # For use with the openid plugin, to give an url to a page users - # can use to signup for an OpenID. - #openidsignup => "http://myopenid.com/", - - # For use with the mirrorlist plugin, a list of mirrors. - #mirrorlist => { - # mirror1 => "http://hostname1", - # mirror2 => "http://hostname2/mirror", - #}, - - # For use with the anonok plugin, a PageSpec specifying what - # pages anonymous users can edit - #anonok_pagespec => "*", - - # For use with the aggregate plugin. - # Enable aggregation to internal pages. New wikis should set this to 1, - # but if you use aggregate already, read the aggregate plugin docs - # before enabling it. - #aggregateinternal => 1, - # Allow aggregation to be triggered via the web. - #aggregate_webtrigger => 1, - - # For use with the pinger plugin, how many seconds to wait before - # timing out. - #pinger_timeout => 15. - - # For use with the amazon S3 plugin, your public access key id. - #amazon_s3_key_id => 'XXXXXXXXXXXXXXXXXXXX', - # And a file holding your secret key. This file *must* not be - # readable by others! - #amazon_s3_key_file => "/home/me/.hide/.s3_key - # The globally unique name of the bucket to use to store the wiki. - #amazon_s3_bucket => "mywiki", - # A prefix to prepend to each page name. - #amazon_s3_prefix => "wiki/", - # Uncomment to use the S3 European datacenter. - #amazon_s3_location => "EU", - # Uncomment if you need to store each index file twice. - #amazon_s3_dupindex => 1, - - # For use with the attachment plugin, a program that returns - # nonzero if its standard input contains an virus. - #virus_checker => "clamdscan -", -} diff --git a/doc/plugins.mdwn b/doc/plugins.mdwn index 5ee2bae89..83735c5fd 100644 --- a/doc/plugins.mdwn +++ b/doc/plugins.mdwn @@ -7,7 +7,7 @@ There's documentation if you want to [[write]] your own plugins, or you can [[install]] plugins [[contributed|contrib]] by others. To enable a plugin, use the `--plugin` switch described in -[[usage]], or the equivalent `add_plugins` line in [[ikiwiki.setup]]. +[[usage]], or the equivalent `add_plugins` line in ikiwiki.setup. Enable the [[goodstuff]] plugin to get a nice selection of plugins that will fit most uses of ikiwiki. diff --git a/doc/plugins/mirrorlist.mdwn b/doc/plugins/mirrorlist.mdwn index 89022e5ab..b371e8eb7 100644 --- a/doc/plugins/mirrorlist.mdwn +++ b/doc/plugins/mirrorlist.mdwn @@ -4,5 +4,4 @@ This plugin allows adding links a list of mirrors to each page in the wiki. For each mirror, a name and an url should be specified. Pages are assumed to exist in the same location under the specified url on each -mirror. The [[ikiwiki.setup]] file has an example of configuring a list of -mirrors. +mirror. diff --git a/doc/plugins/write.mdwn b/doc/plugins/write.mdwn index 7c28088de..6d5056162 100644 --- a/doc/plugins/write.mdwn +++ b/doc/plugins/write.mdwn @@ -357,6 +357,47 @@ something. 
The hook is passed named parameters: `page`, `oldpage`, `newpage`, and `content`, and should try to modify the content to reflect the name change. For example, by converting links to point to the new page. +### getsetup + + hook(type => "getsetup", id => "foo", call => \&getsetup); + +This hooks is not called during normal operation, but only when setting up +the wiki, or generating a setup file. Plugins can use this hook to add +configuration options. + +The hook is passed no parameters. It returns data about the configuration +options added by the plugin. It can also check if the plugin is usable, and +die if not, which will cause the plugin to not be offered in the configuration +interface. + +The data returned is a list of `%config` options, followed by a hash +describing the option. For example: + + return + option_foo => { + type => "boolean", + description => "enable foo", + safe => 1, + rebuild => 1, + }, + option_bar => { + type => "string", + example => "hello", + description => "what to say", + safe => 1, + rebuild => 0, + }, + +* `type` can be "boolean", "string", "integer", "internal" (used for values + that are not user-visible). The type is the type of the leaf values; + the `%config` option may be an array or hash of these. +* `example` can be set to an example value. +* `description` is a short description of the option. +* `safe` should be false if the option should not be displayed in unsafe + configuration methods, such as the web interface. Anything that specifies + a command to run, a path on disk, or a regexp should be marked as unsafe. +* `rebuild` should be true if changing the option will require a wiki rebuild. + ## Plugin interface To import the ikiwiki plugin interface: @@ -376,7 +417,7 @@ it's not exported, the wise choice is to not use it. A plugin can access the wiki's configuration via the `%config` hash. The best way to understand the contents of the hash is to look at -[[ikiwiki.setup]], which sets the hash content to configure the wiki. +your ikiwiki setup file, which sets the hash content to configure the wiki. ### %pagestate @@ -613,15 +654,107 @@ PageSpecs glob patterns, but instead only by a special `internal()` ### RCS plugins -ikiwiki's support for [[revision_control_systems|rcs]] also uses pluggable -perl modules. These are in the `IkiWiki::RCS` namespace, for example -`IkiWiki::RCS::svn`. +ikiwiki's support for [[revision_control_systems|rcs]] is also done via +plugins. See [[RCS_details|rcs/details]] for some more info. + +RCS plugins must register a number of hooks. Each hook has type 'rcs', +and the 'id' field is set to the name of the hook. For example: + + hook(type => "rcs", id => "rcs_update", call => \&rcs_update); + hook(type => "rcs", id => "rcs_prepedit", call => \&rcs_prepedit); + +#### `rcs_update()` + +Updates the working directory with any remote changes. + +#### `rcs_prepedit($)` + +Is passed a file to prepare to edit. It can generate and return an arbitrary +token, that will be passed into `rcs_commit` when committing. For example, +it might return the current revision ID of the file, and use that +information later when merging changes. + +#### `rcs_commit($$$;$$)` + +Passed a file, message, token (from `rcs_prepedit`), user, and ip address. +Should try to commit the file. Returns `undef` on *success* and a version +of the page with the rcs's conflict markers on failure. + +#### `rcs_commit_staged($$$)` + +Passed a message, user, and ip address. Should commit all staged changes. 
+Returns undef on success, and an error message on failure. + +Changes can be staged by calls to `rcs_add, `rcs_remove`, and +`rcs_rename`. + +#### `rcs_add($)` + +Adds the passed file to the archive. The filename is relative to the root +of the srcdir. + +Note that this should not check the new file in, it should only +prepare for it to be checked in when rcs_commit (or `rcs_commit_staged`) is +called. Note that the file may be in a new subdir that is not yet in +to version control; the subdir can be added if so. + +#### `rcs_remove($)` + +Remove a file. The filename is relative to the root of the srcdir. + +Note that this should not check the removal in, it should only prepare for it +to be checked in when `rcs_commit` (or `rcs_commit_staged`) is called. Note +that the new file may be in a new subdir that is not yet inversion +control; the subdir can be added if so. + +#### `rcs_rename($$)` + +Rename a file. The filenames are relative to the root of the srcdir. + +Note that this should not commit the rename, it should only +prepare it for when `rcs_commit` (or `rcs_commit_staged`) is called. +The new filename may be in a new subdir, that is not yet added to +version control. If so, the subdir will exist already, and should +be added to revision control. + +#### `rcs_recentchanges($)` + +Examine the RCS history and generate a list of recent changes. +The parameter is how many changes to return. + +The data structure returned for each change is: + + { + rev => # the RCSs id for this commit + user => # name of user who made the change, + committype => # either "web" or the name of the rcs, + when => # time when the change was made, + message => [ + { line => "commit message line 1" }, + { line => "commit message line 2" }, + # etc, + ], + pages => [ + { + page => # name of page changed, + diffurl => # optional url to a diff of changes + }, + # repeat for each page changed in this commit, + ], + } + +#### `rcs_diff($)` + +The parameter is the rev from `rcs_recentchanges`. +Should return a list of lines of the diff (including \n) in list +context, and the whole diff in scalar context. + +#### `rcs_getctime($)` -Each RCS plugin must support all the `IkiWiki::rcs_*` functions. -See IkiWiki::RCS::Stub for the full list of functions. It's ok if -`rcs_getctime` does nothing except for throwing an error. +This is used to get the page creation time for a file from the RCS, by looking +it up in the history. -See [[RCS_details|rcs/details]] for some more info. +It's ok if this is not implemented, and throws an error. ### PageSpec plugins @@ -636,15 +769,15 @@ IkiWiki::FailReason object if the match fails. ### Setup plugins -The ikiwiki setup file is loaded using a pluggable mechanism. If you -look at the top of [[ikiwiki.setup]], it starts with -'use IkiWiki::Setup::Standard', and the rest of the file is passed to -that module's import method. +The ikiwiki setup file is loaded using a pluggable mechanism. If you look +at the top of a setup file, it starts with 'use IkiWiki::Setup::Standard', +and the rest of the file is passed to that module's import method. It's possible to write other modules in the `IkiWiki::Setup::` namespace that can be used to configure ikiwiki in different ways. These modules should, when imported, populate `$IkiWiki::Setup::raw_setup` with a reference -to a hash containing all the config items. +to a hash containing all the config items. They should also implement a +`gendump` function. 
 By the way, to parse an ikiwiki setup file, a program just needs to
 do something like:
diff --git a/doc/rcs/details.mdwn b/doc/rcs/details.mdwn
index 9bf65762f..e62f3ef49 100644
--- a/doc/rcs/details.mdwn
+++ b/doc/rcs/details.mdwn
@@ -266,8 +266,7 @@ on the same local machine, I suggest to create the latter with the "`git clone -
 command to save disk space.
 
 Note that, as a rule of thumb, you should always put the rcs wrapper (`post-update`)
-into the master repository (`.git/hooks/`) as can be noticed in the Git wrappers of
-the sample [[ikiwiki.setup]].
+into the master repository (`.git/hooks/`).
 
 Here is how a web edit works with ikiwiki and git:
diff --git a/doc/security.mdwn b/doc/security.mdwn
index 498d2b4e7..0841abf49 100644
--- a/doc/security.mdwn
+++ b/doc/security.mdwn
@@ -66,8 +66,7 @@ So it's best if only one person can ever directly write to those directories.
 
 ## setup files
 
 Setup files are not safe to keep in the same revision control repository
-with the rest of the wiki. Just don't do it. [[ikiwiki.setup]] is *not*
-used as the setup file for this wiki, BTW.
+with the rest of the wiki. Just don't do it.
 
 ## page locking can be bypassed via direct commits
diff --git a/doc/setup.mdwn b/doc/setup.mdwn
index 174d6ceb9..9c67c2a6c 100644
--- a/doc/setup.mdwn
+++ b/doc/setup.mdwn
@@ -74,11 +74,16 @@ and `--rebuild` too. Get comfortable with its command line (see
 By now you should be getting tired of typing in all the command line
 options each time you change something in your wiki's setup. Time to
 introduce setup files.
-
-A sample setup file is [[ikiwiki.setup]]. Download it (or copy it from
-`doc/ikiwiki.setup` in the ikiwiki sources), and edit it. Note that this
-file should *not* be put in your wiki's directory with the rest of the
-files. A good place to put it is in a ~/.ikiwiki/ subdirectory.
+
+To generate a setup file, use `ikiwiki --dumpsetup`. You can pass
+all the options you have been including at the command line, and they
+will be stored in the setup file.
+
+    ikiwiki $SRCDIR $DESTDIR --url=http://example.org/~you/wiki/ --dumpsetup ikiwiki.setup
+
+Note that this file should *not* be put in your wiki's directory with
+the rest of the files. A good place to put it is in a ~/.ikiwiki/
+subdirectory.
 
 Most of the options, like `wikiname` in the setup file are the same as
 ikiwiki's command line options (documented in [[usage]]). `srcdir` and
@@ -91,12 +96,12 @@ will set everything up.
 
 ## Turn on additional features.
 
-Now you have a basic wiki with a configuration file. Time to experiment
+Now you have a basic wiki with a setup file. Time to experiment
 with ikiwiki's many features.
 
 Let's first enable a key wiki feature and set up [[CGI]] to allow
 editing the wiki from the web. Just edit ikiwiki.setup, uncomment the
-block for the cgi wrapper, make sure the filename for the cgi wrapper
+settings for the `cgi_wrapper`, make sure the filename for the cgi wrapper
 is ok, run `ikiwiki --setup ikiwiki.setup`, and you're done!
 
 There are lots of other configuration options in ikiwiki.setup that you
@@ -113,7 +118,7 @@ on the revision control system you choose, the way this is done varies.
 Note that the .ikiwiki subdirectory is where ikiwiki keeps its state, and
 should be preserved, but not checked into revision control.
 
-The new [[ikiwiki-makerepo]] command automates setting up a wiki in
+The [[ikiwiki-makerepo]] command automates setting up a wiki in
 revision control.
 
 [[!toggle id=subversion text="Subversion"]]
@@ -167,7 +172,7 @@ about using the git repositories.
     # remember the password you use in the next step and
     # substitute it for 'wikiKeyPass' in the get_passphrase() hook below
     # note that you should never generate two monotone keys with the same name
-    mtn genkey web@machine.company.com
+    mtn genkey web@example.com
     mtn db init --db=$REPOSITORY
     mv $SRCDIR $SRCDIR-old
     cd $SRCDIR-old
@@ -188,11 +193,11 @@ about using the git repositories.
 
 Once your wiki is checked in to the revision control system, you
 should configure ikiwiki to use revision control. Edit your
-ikiwiki.setup, and uncomment the lines for the revision control system
+ikiwiki.setup, set `rcs` to the revision control system
 you chose to use. Be sure to set `svnrepo` to $REPOSITORY, if using
-subversion. Uncomment the block for the wrapper for your revision
-control system, and configure the wrapper path in that block
-appropriately (for Git, it should be `$REPOSITORY/hooks/post-update`).
+subversion. Uncomment the configuration for the wrapper for your revision
+control system, and configure the wrapper path appropriately
+(for Git, it should be `$REPOSITORY/hooks/post-update`).
 
 Once it's all set up, run `ikiwiki --setup ikiwiki.setup` once more.
 Now you should be able to edit files in $SRCDIR, and use your revision
diff --git a/doc/tips/nearlyfreespeech.mdwn b/doc/tips/nearlyfreespeech.mdwn
index 435743bf9..6715f0c29 100644
--- a/doc/tips/nearlyfreespeech.mdwn
+++ b/doc/tips/nearlyfreespeech.mdwn
@@ -76,16 +76,15 @@ Here is an example of how I set up a wiki:
 
     mkdir ~/wiki
     cd ~/wiki
-    cp ~/ikiwiki/doc/ikiwiki.setup .
     cp -r ~/ikiwiki/doc/examples/blog/* .
+    ikiwiki -dumpsetup ikiwiki.setup
     nano ikiwiki.setup
     # Set destdir to /home/htdocs
     # Set srcdir to /home/private/wiki
     # Set url to http://yoursite.nfshost.com/ , set cgiurl likewise
-    # Uncomment the `rcs => "git"` line, and the cgi and git
-    # post-update wrapper blocks.
-    # Set the cgi wrapper path to /home/htdocs/ikiwiki.cgi
-    # Set the git wrapper path to /home/private/wiki.git/hooks/post-update
+    # Uncomment the `rcs => "git"` line.
+    # Set the cgi_wrapper path to /home/htdocs/ikiwiki.cgi
+    # Set the git_wrapper path to /home/private/wiki.git/hooks/post-update
     # Configure the rest to your liking and save the file.
     ikiwiki-makerepo git . ../wiki.git
     ikiwiki -setup ikiwiki.setup
diff --git a/doc/todo/Make_example_setup_file_consistent.mdwn b/doc/todo/Make_example_setup_file_consistent.mdwn
index 1fdff7b0f..54cc34af6 100644
--- a/doc/todo/Make_example_setup_file_consistent.mdwn
+++ b/doc/todo/Make_example_setup_file_consistent.mdwn
@@ -1,4 +1,4 @@
-The current example [[ikiwiki.setup]] file has a number of options included, but commented out. This is standard. Unfortunately there are two standards for the settings of those commented out options:
+The current example ikiwiki.setup file has a number of options included, but commented out. This is standard. Unfortunately there are two standards for the settings of those commented out options:
 
 - Have the commented out options showing the default setting, or
 - Have the commented out options showing the most common alternate setting.
@@ -26,4 +26,8 @@ What do others think?
 > I may not work on it myself, since I have some
 > [[interesting_ideas|online_configuration]] that would let ikiwiki
 > generate a setup file for you, rather than having to keep maintain the
-> current example. --[[Joey]]
+> current example.
+>
+> And.. [[done]].. setup files are now generated with `--dumpsetup`, based on
+> the built-in defaults, and commented options show an example
+> setting, not a default.
--[[Joey]] diff --git a/doc/todo/cas_authentication.mdwn b/doc/todo/cas_authentication.mdwn index a6b428207..c8ffe7005 100644 --- a/doc/todo/cas_authentication.mdwn +++ b/doc/todo/cas_authentication.mdwn @@ -163,7 +163,7 @@ follows) ? +`/etc/ssl/certs/ca-certificates.crt` is sufficient). > It would be good to add commented-out examples of these to -> [[ikiwiki.setup]] as well. +> ikiwiki.setup as well. +This plugin is not enabled by default. It can not be used with other +authentication plugin, such as [[passwordauth]] or [[openid]]. diff --git a/doc/todo/color_plugin.mdwn b/doc/todo/color_plugin.mdwn index b82e0b704..ec246f9d6 100644 --- a/doc/todo/color_plugin.mdwn +++ b/doc/todo/color_plugin.mdwn @@ -198,6 +198,3 @@ comments are very welcome. --[[Paweł|ptecza]] + \[[!color ,#ff0000 "Default color text on red background"]] + +Foreground is missing, so the text has default color. - + - +This plugin is not enabled by default. You can do that in [[ikiwiki.setup]] - +file (hint: `add_plugins` variable). diff --git a/doc/todo/conditional_underlay_files.mdwn b/doc/todo/conditional_underlay_files.mdwn index 14ab4dac3..c578bceaf 100644 --- a/doc/todo/conditional_underlay_files.mdwn +++ b/doc/todo/conditional_underlay_files.mdwn @@ -12,7 +12,7 @@ I can see two good ways to implement this. Ideally, with [[conditional_text_based_on_ikiwiki_features]] available, ikiwiki could parse a page like conditionalpages.mdwn, which could contain a set of conditional-wrapped page names; that seems like the most elegant and -ikiwiki-like approach. Alternatively, [[/ikiwiki.setup]] could contain a +ikiwiki-like approach. Alternatively, ikiwiki.setup could contain a Perl-generated exclude option by default; that would work, but it seems hackish. diff --git a/doc/usage.mdwn b/doc/usage.mdwn index 2b104bcdb..511bb687a 100644 --- a/doc/usage.mdwn +++ b/doc/usage.mdwn @@ -55,13 +55,16 @@ These options control the mode that ikiwiki operates in. In setup mode, ikiwiki reads the config file, which is really a perl program that can call ikiwiki internal functions. - [[ikiwiki.setup]] is an example of such a config file. - The default action when --setup is specified is to automatically generate wrappers for a wiki based on data in a config file, and rebuild the wiki. If you only want to build any changed pages, you can use --refresh with --setup. +* --dumpsetup configfile + + Causes ikiwiki to write to the specified config file, dumping out + its current configuration. + * --wrappers If used with --setup --refresh, this makes it also update any configured diff --git a/doc/w3mmode/ikiwiki.setup b/doc/w3mmode/ikiwiki.setup index e08856158..5f5cbbff9 100644 --- a/doc/w3mmode/ikiwiki.setup +++ b/doc/w3mmode/ikiwiki.setup @@ -17,19 +17,13 @@ use IkiWiki::Setup::Standard { cgiurl => 'ikiwiki.cgi', rcs => "", - wrappers => [ - { - # The cgi wrapper. - cgi => 1, - # The wrapper must be put in ~/.ikiwiki/wrappers/, since - # ikiwiki-w3m.cgi only looks in this one location. - # The wrapper can be given any name as long as it's - # in that directory. - wrapper => "$ENV{HOME}/.ikiwiki/wrappers/ikiwiki.cgi", - wrappermode => "0755", - }, - ], - + # The wrapper must be put in ~/.ikiwiki/wrappers/, since + # ikiwiki-w3m.cgi only looks in this one location. + # The wrapper can be given any name as long as it's + # in that directory. 
+ cgi_wrapper => "$ENV{HOME}/.ikiwiki/wrappers/ikiwiki.cgi", + cgi_wrappermode => "0755", + add_plugins => [qw{anonok}], rss => 1, atom => 1, diff --git a/ikiwiki.in b/ikiwiki.in index 3bb881c43..0390df7e1 100755 --- a/ikiwiki.in +++ b/ikiwiki.in @@ -20,6 +20,7 @@ sub getconfig () { #{{{ Getopt::Long::Configure('pass_through'); GetOptions( "setup|s=s" => \$config{setup}, + "dumpsetup|s=s" => \$config{dumpsetup}, "wikiname=s" => \$config{wikiname}, "verbose|v!" => \$config{verbose}, "syslog!" => \$config{syslog}, @@ -27,7 +28,7 @@ sub getconfig () { #{{{ "refresh!" => \$config{refresh}, "post-commit" => \$config{post_commit}, "render=s" => \$config{render}, - "wrappers!" => \$config{wrappers}, + "wrappers!" => \$config{genwrappers}, "usedirs!" => \$config{usedirs}, "prefix-directives!" => \$config{prefix_directives}, "getctime" => \$config{getctime}, @@ -45,7 +46,6 @@ sub getconfig () { #{{{ "adminemail=s" => \$config{adminemail}, "timeformat=s" => \$config{timeformat}, "sslcookie!" => \$config{sslcookie}, - "httpauth!" => \$config{httpauth}, "userdir=s" => \$config{userdir}, "htmlext=s" => \$config{htmlext}, "libdir=s" => \$config{libdir}, @@ -68,14 +68,11 @@ sub getconfig () { #{{{ $config{wrappermode}=possibly_foolish_untaint($_[1]) }, "plugin=s@" => sub { - push @{$config{plugin}}, $_[1]; + push @{$config{add_plugins}}, $_[1]; }, "disable-plugin=s@" => sub { push @{$config{disable_plugins}}, $_[1]; }, - "pingurl=s" => sub { - push @{$config{pingurl}}, $_[1]; - }, "set=s" => sub { my ($var, $val)=split('=', $_[1], 2); if (! defined $var || ! defined $val) { @@ -91,10 +88,14 @@ sub getconfig () { #{{{ if (! $config{setup} && ! $config{render}) { loadplugins(); - usage() unless @ARGV == 2; - $config{srcdir} = possibly_foolish_untaint(shift @ARGV); - $config{destdir} = possibly_foolish_untaint(shift @ARGV); - checkconfig(); + if (@ARGV == 2) { + $config{srcdir} = possibly_foolish_untaint(shift @ARGV); + $config{destdir} = possibly_foolish_untaint(shift @ARGV); + checkconfig(); + } + else { + usage() unless $config{dumpsetup}; + } } } else { @@ -114,7 +115,47 @@ sub main () { #{{{ if ($config{setup}) { require IkiWiki::Setup; - setup(); + IkiWiki::Setup::load($config{setup}); + if (@{$config{wrappers}} && + ! $config{render} && ! $config{dumpsetup} && + (! $config{refresh} || $config{genwrappers})) { + debug(gettext("generating wrappers..")); + require IkiWiki::Wrapper; + my %origconfig=(%config); + my @wrappers=@{$config{wrappers}}; + delete $config{wrappers}; + delete $config{genwrappers}; + foreach my $wrapper (@wrappers) { + %config=(%origconfig, + rebuild => 0, + verbose => 0, + %{$wrapper}, + ); + checkconfig(); + if (! $config{cgi} && ! $config{post_commit}) { + $config{post_commit}=1; + } + gen_wrapper(); + } + %config=(%origconfig); + } + + # setup implies a wiki rebuild by default + if (! $config{refresh}) { + $config{rebuild}=1; + } + + # ignore syslog setting from setup file + # while doing initial setup + $config{syslog}=0 unless $config{dumpsetup}; + + loadplugins(); + checkconfig(); + } + + if ($config{dumpsetup}) { + require IkiWiki::Setup; + IkiWiki::Setup::dump($config{dumpsetup}); } elsif ($config{wrapper}) { lockwiki(); @@ -136,12 +177,19 @@ sub main () { #{{{ # do nothing } else { + if (! 
$config{refresh}) { + debug(gettext("rebuilding wiki..")); + } + else { + debug(gettext("refreshing wiki..")); + } lockwiki(); loadindex(); require IkiWiki::Render; rcs_update(); refresh(); saveindex(); + debug(gettext("done")); } } #}}} diff --git a/po/ikiwiki.pot b/po/ikiwiki.pot index b6e2dc68c..50c2fb87d 100644 --- a/po/ikiwiki.pot +++ b/po/ikiwiki.pot @@ -8,7 +8,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2008-07-25 16:16-0400\n" +"POT-Creation-Date: 2008-07-26 22:24-0400\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" @@ -50,7 +50,7 @@ msgid "%s is not an editable page" msgstr "" #: ../IkiWiki/CGI.pm:437 ../IkiWiki/Plugin/brokenlinks.pm:24 -#: ../IkiWiki/Plugin/inline.pm:261 ../IkiWiki/Plugin/opendiscussion.pm:17 +#: ../IkiWiki/Plugin/inline.pm:306 ../IkiWiki/Plugin/opendiscussion.pm:17 #: ../IkiWiki/Plugin/orphans.pm:28 ../IkiWiki/Render.pm:78 #: ../IkiWiki/Render.pm:148 msgid "discussion" @@ -71,122 +71,122 @@ msgstr "" msgid "You are banned." msgstr "" -#: ../IkiWiki/CGI.pm:758 ../IkiWiki/CGI.pm:759 ../IkiWiki.pm:788 +#: ../IkiWiki/CGI.pm:758 ../IkiWiki/CGI.pm:759 ../IkiWiki.pm:1086 msgid "Error" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:57 +#: ../IkiWiki/Plugin/aggregate.pm:76 msgid "Aggregation triggered via web." msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:66 +#: ../IkiWiki/Plugin/aggregate.pm:85 msgid "Nothing to do right now, all feeds are up-to-date!" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:193 +#: ../IkiWiki/Plugin/aggregate.pm:212 #, perl-format msgid "missing %s parameter" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:227 +#: ../IkiWiki/Plugin/aggregate.pm:246 msgid "new feed" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:241 +#: ../IkiWiki/Plugin/aggregate.pm:260 msgid "posts" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:243 +#: ../IkiWiki/Plugin/aggregate.pm:262 msgid "new" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:406 +#: ../IkiWiki/Plugin/aggregate.pm:425 #, perl-format msgid "expiring %s (%s days old)" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:413 +#: ../IkiWiki/Plugin/aggregate.pm:432 #, perl-format msgid "expiring %s" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:440 +#: ../IkiWiki/Plugin/aggregate.pm:459 #, perl-format msgid "processed ok at %s" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:444 +#: ../IkiWiki/Plugin/aggregate.pm:463 #, perl-format msgid "checking feed %s ..." msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:449 +#: ../IkiWiki/Plugin/aggregate.pm:468 #, perl-format msgid "could not find feed at %s" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:464 +#: ../IkiWiki/Plugin/aggregate.pm:483 msgid "feed not found" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:475 +#: ../IkiWiki/Plugin/aggregate.pm:494 #, perl-format msgid "(invalid UTF-8 stripped from feed)" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:481 +#: ../IkiWiki/Plugin/aggregate.pm:500 #, perl-format msgid "(feed entities escaped)" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:487 +#: ../IkiWiki/Plugin/aggregate.pm:506 msgid "feed crashed XML::Feed!" msgstr "" -#: ../IkiWiki/Plugin/aggregate.pm:561 +#: ../IkiWiki/Plugin/aggregate.pm:580 #, perl-format msgid "creating new page %s" msgstr "" -#: ../IkiWiki/Plugin/amazon_s3.pm:30 +#: ../IkiWiki/Plugin/amazon_s3.pm:31 msgid "deleting bucket.." 
msgstr "" -#: ../IkiWiki/Plugin/amazon_s3.pm:37 ../IkiWiki/Setup.pm:117 +#: ../IkiWiki/Plugin/amazon_s3.pm:38 ../ikiwiki.in:188 msgid "done" msgstr "" -#: ../IkiWiki/Plugin/amazon_s3.pm:46 +#: ../IkiWiki/Plugin/amazon_s3.pm:93 #, perl-format msgid "Must specify %s" msgstr "" -#: ../IkiWiki/Plugin/amazon_s3.pm:85 +#: ../IkiWiki/Plugin/amazon_s3.pm:132 msgid "Failed to create bucket in S3: " msgstr "" -#: ../IkiWiki/Plugin/amazon_s3.pm:170 +#: ../IkiWiki/Plugin/amazon_s3.pm:217 msgid "Failed to save file to S3: " msgstr "" -#: ../IkiWiki/Plugin/amazon_s3.pm:192 +#: ../IkiWiki/Plugin/amazon_s3.pm:239 msgid "Failed to delete file from S3: " msgstr "" -#: ../IkiWiki/Plugin/attachment.pm:22 +#: ../IkiWiki/Plugin/attachment.pm:34 #, perl-format msgid "there is already a page named %s" msgstr "" -#: ../IkiWiki/Plugin/attachment.pm:41 +#: ../IkiWiki/Plugin/attachment.pm:53 msgid "prohibited by allowed_attachments" msgstr "" -#: ../IkiWiki/Plugin/attachment.pm:144 +#: ../IkiWiki/Plugin/attachment.pm:156 msgid "bad attachment filename" msgstr "" -#: ../IkiWiki/Plugin/attachment.pm:186 +#: ../IkiWiki/Plugin/attachment.pm:198 msgid "attachment upload" msgstr "" @@ -262,33 +262,33 @@ msgstr "" msgid "failed to determine size of image %s" msgstr "" -#: ../IkiWiki/Plugin/inline.pm:47 +#: ../IkiWiki/Plugin/inline.pm:89 msgid "Must specify url to wiki with --url when using --rss or --atom" msgstr "" -#: ../IkiWiki/Plugin/inline.pm:101 +#: ../IkiWiki/Plugin/inline.pm:146 msgid "missing pages parameter" msgstr "" -#: ../IkiWiki/Plugin/inline.pm:149 +#: ../IkiWiki/Plugin/inline.pm:194 #, perl-format msgid "unknown sort type %s" msgstr "" -#: ../IkiWiki/Plugin/inline.pm:220 +#: ../IkiWiki/Plugin/inline.pm:265 msgid "Add a new post titled:" msgstr "" -#: ../IkiWiki/Plugin/inline.pm:236 +#: ../IkiWiki/Plugin/inline.pm:281 #, perl-format msgid "nonexistant template %s" msgstr "" -#: ../IkiWiki/Plugin/inline.pm:269 ../IkiWiki/Render.pm:82 +#: ../IkiWiki/Plugin/inline.pm:314 ../IkiWiki/Render.pm:82 msgid "Discussion" msgstr "" -#: ../IkiWiki/Plugin/inline.pm:506 +#: ../IkiWiki/Plugin/inline.pm:551 msgid "RPC::XML::Client not found, not pinging" msgstr "" @@ -301,11 +301,11 @@ msgstr "" msgid "%s is locked by %s and cannot be edited" msgstr "" -#: ../IkiWiki/Plugin/mdwn.pm:28 +#: ../IkiWiki/Plugin/mdwn.pm:40 msgid "multimarkdown is enabled, but Text::MultiMarkdown is not installed" msgstr "" -#: ../IkiWiki/Plugin/mdwn.pm:51 +#: ../IkiWiki/Plugin/mdwn.pm:63 #, perl-format msgid "failed to load Markdown.pm perl module (%s) or /usr/bin/markdown (%s)" msgstr "" @@ -322,11 +322,11 @@ msgstr "" msgid "redir cycle is not allowed" msgstr "" -#: ../IkiWiki/Plugin/mirrorlist.pm:23 +#: ../IkiWiki/Plugin/mirrorlist.pm:35 msgid "Mirrors" msgstr "" -#: ../IkiWiki/Plugin/mirrorlist.pm:23 +#: ../IkiWiki/Plugin/mirrorlist.pm:35 msgid "Mirror" msgstr "" @@ -334,11 +334,15 @@ msgstr "" msgid "more" msgstr "" -#: ../IkiWiki/Plugin/openid.pm:45 +#: ../IkiWiki/Plugin/norcs.pm:55 +msgid "getctime not implemented" +msgstr "" + +#: ../IkiWiki/Plugin/openid.pm:57 msgid "Log in with" msgstr "" -#: ../IkiWiki/Plugin/openid.pm:48 +#: ../IkiWiki/Plugin/openid.pm:60 msgid "Get an OpenID" msgstr "" @@ -350,31 +354,31 @@ msgstr "" msgid "bad or missing template" msgstr "" -#: ../IkiWiki/Plugin/passwordauth.pm:223 +#: ../IkiWiki/Plugin/passwordauth.pm:243 msgid "Account creation successful. Now you can Login." msgstr "" -#: ../IkiWiki/Plugin/passwordauth.pm:226 +#: ../IkiWiki/Plugin/passwordauth.pm:246 msgid "Error creating account." 
msgstr "" -#: ../IkiWiki/Plugin/passwordauth.pm:233 +#: ../IkiWiki/Plugin/passwordauth.pm:253 msgid "No email address, so cannot email password reset instructions." msgstr "" -#: ../IkiWiki/Plugin/passwordauth.pm:265 +#: ../IkiWiki/Plugin/passwordauth.pm:287 msgid "Failed to send mail" msgstr "" -#: ../IkiWiki/Plugin/passwordauth.pm:267 +#: ../IkiWiki/Plugin/passwordauth.pm:289 msgid "You have been mailed password reset instructions." msgstr "" -#: ../IkiWiki/Plugin/passwordauth.pm:302 +#: ../IkiWiki/Plugin/passwordauth.pm:324 msgid "incorrect password reset url" msgstr "" -#: ../IkiWiki/Plugin/passwordauth.pm:305 +#: ../IkiWiki/Plugin/passwordauth.pm:327 msgid "password reset denied" msgstr "" @@ -382,21 +386,21 @@ msgstr "" msgid "Ping received." msgstr "" -#: ../IkiWiki/Plugin/pinger.pm:37 +#: ../IkiWiki/Plugin/pinger.pm:49 msgid "requires 'from' and 'to' parameters" msgstr "" -#: ../IkiWiki/Plugin/pinger.pm:42 +#: ../IkiWiki/Plugin/pinger.pm:54 #, perl-format msgid "Will ping %s" msgstr "" -#: ../IkiWiki/Plugin/pinger.pm:45 +#: ../IkiWiki/Plugin/pinger.pm:57 #, perl-format msgid "Ignoring ping directive for wiki %s (this wiki is %s)" msgstr "" -#: ../IkiWiki/Plugin/pinger.pm:61 +#: ../IkiWiki/Plugin/pinger.pm:73 msgid "LWP not found, not pinging" msgstr "" @@ -476,23 +480,23 @@ msgstr "" msgid "%A night" msgstr "" -#: ../IkiWiki/Plugin/prettydate.pm:78 +#: ../IkiWiki/Plugin/prettydate.pm:96 msgid "at teatime on %A" msgstr "" -#: ../IkiWiki/Plugin/prettydate.pm:82 +#: ../IkiWiki/Plugin/prettydate.pm:100 msgid "at midnight" msgstr "" -#: ../IkiWiki/Plugin/prettydate.pm:85 +#: ../IkiWiki/Plugin/prettydate.pm:103 msgid "at noon on %A" msgstr "" -#: ../IkiWiki/Plugin/recentchanges.pm:76 +#: ../IkiWiki/Plugin/recentchanges.pm:95 msgid "missing page" msgstr "" -#: ../IkiWiki/Plugin/recentchanges.pm:78 +#: ../IkiWiki/Plugin/recentchanges.pm:97 #, perl-format msgid "The page %s does not exist." msgstr "" @@ -576,17 +580,17 @@ msgstr "" msgid "update for rename of %s to %s" msgstr "" -#: ../IkiWiki/Plugin/search.pm:20 +#: ../IkiWiki/Plugin/search.pm:32 #, perl-format msgid "Must specify %s when using the search plugin" msgstr "" -#: ../IkiWiki/Plugin/search.pm:166 +#: ../IkiWiki/Plugin/search.pm:178 #, perl-format msgid "need Digest::SHA1 to index %s" msgstr "" -#: ../IkiWiki/Plugin/search.pm:201 +#: ../IkiWiki/Plugin/search.pm:213 msgid "search" msgstr "" @@ -688,10 +692,6 @@ msgstr "" msgid "failed to generate image from code" msgstr "" -#: ../IkiWiki/Rcs/Stub.pm:96 -msgid "getctime not implemented" -msgstr "" - #: ../IkiWiki/Render.pm:276 ../IkiWiki/Render.pm:297 #, perl-format msgid "skipping bad filename %s" @@ -739,23 +739,11 @@ msgstr "" #. translators: The first parameter is a filename, and the second #. translators: is a (probably not translated) error message. -#: ../IkiWiki/Setup.pm:25 +#: ../IkiWiki/Setup.pm:23 #, perl-format msgid "cannot read %s: %s" msgstr "" -#: ../IkiWiki/Setup.pm:58 -msgid "generating wrappers.." -msgstr "" - -#: ../IkiWiki/Setup.pm:107 -msgid "rebuilding wiki.." -msgstr "" - -#: ../IkiWiki/Setup.pm:110 -msgid "refreshing wiki.." -msgstr "" - #: ../IkiWiki/Wrapper.pm:16 #, perl-format msgid "%s doesn't seem to be executable" @@ -792,23 +780,31 @@ msgstr "" msgid "usage: ikiwiki [options] source dest" msgstr "" -#: ../ikiwiki.in:82 +#: ../ikiwiki.in:79 msgid "usage: --set var=value" msgstr "" -#: ../IkiWiki.pm:126 +#: ../ikiwiki.in:118 +msgid "generating wrappers.." +msgstr "" + +#: ../ikiwiki.in:177 +msgid "rebuilding wiki.." 
+msgstr "" + +#: ../ikiwiki.in:180 +msgid "refreshing wiki.." +msgstr "" + +#: ../IkiWiki.pm:410 msgid "Must specify url to wiki with --url when using --cgi" msgstr "" -#. translators: The first parameter is a -#. translators: preprocessor directive name, -#. translators: the second a page name, the -#. translators: third a number. -#: ../IkiWiki.pm:771 +#: ../IkiWiki.pm:1069 #, perl-format -msgid "%s preprocessing loop detected on %s at depth %i" +msgid "preprocessing loop detected on %s at depth %i" msgstr "" -#: ../IkiWiki.pm:1219 +#: ../IkiWiki.pm:1557 msgid "yes" msgstr "" diff --git a/t/syntax.t b/t/syntax.t index 01346a338..694bb01df 100755 --- a/t/syntax.t +++ b/t/syntax.t @@ -6,7 +6,7 @@ use Test::More; my @progs="ikiwiki.in"; my @libs="IkiWiki.pm"; # monotone, external, amazon_s3 skipped since they need perl modules -push @libs, map { chomp; $_ } `find IkiWiki -type f -name \\*.pm | grep -v IkiWiki/Rcs/monotone.pm | grep -v IkiWiki/Plugin/external.pm | grep -v IkiWiki/Plugin/amazon_s3.pm`; +push @libs, map { chomp; $_ } `find IkiWiki -type f -name \\*.pm | grep -v monotone.pm | grep -v external.pm | grep -v amazon_s3.pm`; plan(tests => (@progs + @libs));