X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/0850cde5a6a77898e6ae88173a34bc641414deb9..885e4b607ae6cdcd48c792ce45c8b7faa29a76fb:/IkiWiki/Plugin/amazon_s3.pm?ds=inline

diff --git a/IkiWiki/Plugin/amazon_s3.pm b/IkiWiki/Plugin/amazon_s3.pm
index 0613d4357..93c10b629 100644
--- a/IkiWiki/Plugin/amazon_s3.pm
+++ b/IkiWiki/Plugin/amazon_s3.pm
@@ -16,11 +16,81 @@ BEGIN {
 	}
 };
 
-sub import { #{{{
+sub import {
+	hook(type => "getopt", id => "amazon_s3", call => \&getopt);
+	hook(type => "getsetup", id => "amazon_s3", call => \&getsetup);
 	hook(type => "checkconfig", id => "amazon_s3", call => \&checkconfig);
-} # }}}
+}
+
+sub getopt () {
+        eval q{use Getopt::Long};
+        error($@) if $@;
+        Getopt::Long::Configure('pass_through');
+        GetOptions("delete-bucket" => sub {
+		my $bucket=getbucket();
+		debug(gettext("deleting bucket.."));
+		my $resp = $bucket->list_all or die $bucket->err . ": " . $bucket->errstr;
+		foreach my $key (@{$resp->{keys}}) {
+			debug("\t".$key->{key});
+			$bucket->delete_key($key->{key}) or die $bucket->err . ": " . $bucket->errstr;
+		}
+		$bucket->delete_bucket or die $bucket->err . ": " . $bucket->errstr;
+		debug(gettext("done"));
+		exit(0);
+	});
+}
 
-sub checkconfig { #{{{
+sub getsetup () {
+	return
+		plugin => {
+			safe => 0,
+			rebuild => 0,
+		},
+		amazon_s3_key_id => {
+			type => "string",
+			example => "XXXXXXXXXXXXXXXXXXXX",
+			description => "public access key id",
+			safe => 1,
+			rebuild => 0,
+		},
+		amazon_s3_key_file => {
+			type => "string",
+			example => "$ENV{HOME}/.s3_key",
+			description => "file holding secret key (must not be readable by others!)",
+			safe => 0, # ikiwiki reads this file
+			rebuild => 0,
+		},
+		amazon_s3_bucket => {
+			type => "string",
+			example => "mywiki",
+			description => "globally unique name of bucket to store wiki in",
+			safe => 1,
+			rebuild => 1,
+		},
+		amazon_s3_prefix => {
+			type => "string",
+			example => "wiki/",
+			description => "a prefix to prepend to each page name",
+			safe => 1,
+			rebuild => 1,
+		},
+		amazon_s3_location => {
+			type => "string",
+			example => "EU",
+			description => "which S3 datacenter to use (leave blank for default)",
+			safe => 1,
+			rebuild => 1,
+		},
+		amazon_s3_dupindex => {
+			type => "boolean",
+			example => 0,
+			description => "store each index file twice? (allows urls ending in \"/index.html\" and \"/\")",
+			safe => 1,
+			rebuild => 1,
+		},
+}
+
+sub checkconfig {
 	foreach my $field (qw{amazon_s3_key_id amazon_s3_key_file
 	                      amazon_s3_bucket}) {
 		if (! exists $config{$field} || ! defined $config{$field}) {
@@ -31,11 +101,11 @@ sub checkconfig { #{{{
 	    ! defined $config{amazon_s3_prefix}) {
 	    $config{amazon_s3_prefix}="wiki/";
 	}
-} #}}}
+}
 
 {
 my $bucket;
-sub getbucket { #{{{
+sub getbucket {
 	return $bucket if defined $bucket;
 	
 	open(IN, "<", $config{amazon_s3_key_file}) || error($config{amazon_s3_key_file}.": ".$!);
@@ -68,7 +138,30 @@ sub getbucket { #{{{
 	}
 
 	return $bucket;
-} #}}}
+}
+}
+
+# Given a file, return any S3 keys associated with it.
+sub file2keys ($) {
+	my $file=shift;
+
+	my @keys;
+	if ($file =~ /^\Q$config{destdir}\/\E(.*)/) {
+		push @keys, $config{amazon_s3_prefix}.$1;
+
+		# Munge foo/index.html to foo/
+		if ($keys[0]=~/(^|.*\/)index\.$config{htmlext}$/) {
+			# A duplicate might need to be stored under the
+			# unmunged name too.
+			if (!$config{usedirs} || $config{amazon_s3_dupindex}) {
+				push @keys, $1;
+			}
+			else {
+				@keys=($1);
+			}
+		}
+	}
+	return @keys;
 }
 
 package IkiWiki;
@@ -76,7 +169,7 @@ use File::MimeInfo;
 use Encode;
 
 # This is a wrapper around the real writefile.
-sub writefile ($$$;$$) { #{{{
+sub writefile ($$$;$$) {
         my $file=shift;
         my $destdir=shift;
         my $content=shift;
@@ -85,21 +178,11 @@ sub writefile ($$$;$$) { #{{{
 
 	# First, write the file to disk.
 	my $ret=$IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::writefile'}->($file, $destdir, $content, $binary, $writer);
-
-	# Now, determine if the file was written to the destdir.
-	# writefile might be used for writing files elsewhere.
-	# Also, $destdir might be set to a subdirectory of the destdir.
-	my $key;
-	if ($destdir eq $config{destdir}) {
-		$key=$file;
-	}
-	elsif ("$destdir/$file" =~ /^\Q$config{destdir}\/\E(.*)/) {
-		$key=$1;
-	}
+		
+	my @keys=IkiWiki::Plugin::amazon_s3::file2keys("$destdir/$file");
 
 	# Store the data in S3.
-	if (defined $key) {
-		$key=$config{amazon_s3_prefix}.$key;
+	if (@keys) {
 		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();
 
 		# The http layer tries to downgrade utf-8
@@ -108,69 +191,62 @@ sub writefile ($$$;$$) { #{{{
 		# so force convert it to bytes.
 		$content=encode_utf8($content) if defined $content;
 
-		if (defined $content && ! length $content) {
-			# S3 doesn't allow storing empty files!
-			$content=" ";
-		}
-		
 		my %opts=(
 			acl_short => 'public-read',
 			content_type => mimetype("$destdir/$file"),
 		);
-		my $res;
-		if (! $writer) {
-			$res=$bucket->add_key($key, $content, \%opts);
-		}
-		else {
-			# read back in the file that the writer emitted
-			$res=$bucket->add_key_filename($key, "$destdir/$file", \%opts);
-		}
-		if ($res && $key=~/(^|\/)index.$config{htmlext}$/) {
-			# index.html files are a special case. Since S3 is
-			# not a normal web server, it won't serve up
-			# foo/index.html when foo/ is requested. So the
-			# file has to be stored twice. (This is bad news
-			# when usedirs is enabled!)
-			$key=~s/index.$config{htmlext}$//;
+
+		# If there are multiple keys to write, data is sent
+		# multiple times.
+		# TODO: investigate using the new copy operation.
+		#       (It may not be robust enough.)
+		foreach my $key (@keys) {
+			my $res;
 			if (! $writer) {
 				$res=$bucket->add_key($key, $content, \%opts);
 			}
 			else {
-				$res=$bucket->add_key_filename($key, "$destdir/$file", \%opts);
+				# This test for empty files is a workaround
+				# for this bug:
+				# http://rt.cpan.org//Ticket/Display.html?id=35731
+				if (-z "$destdir/$file") {
+					$res=$bucket->add_key($key, "", \%opts);
+				}
+				else {
+					# read back in the file that the writer emitted
+					$res=$bucket->add_key_filename($key, "$destdir/$file", \%opts);
+				}
+			}
+			if (! $res) {
+				error(gettext("Failed to save file to S3: ").
+					$bucket->err.": ".$bucket->errstr."\n");
 			}
-		}
-		if (! $res) {
-			error(gettext("Failed to save file to S3: ").
-				$bucket->err.": ".$bucket->errstr."\n");
 		}
 	}
 
 	return $ret;
-} #}}}
+}
 
 # This is a wrapper around the real prune.
-sub prune ($) { #{{{
+sub prune ($) {
 	my $file=shift;
 
-	# If a file in the destdir is being pruned, need to delete it out
-	# of S3 as well.
-	if ($file =~ /^\Q$config{destdir}\/\E(.*)/) {
-		my $key=$config{amazon_s3_prefix}.$1;
-		print STDERR "wrapped prune ($key)\n";
+	my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file);
+
+	# Prune files out of S3 too.
+	if (@keys) {
 		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();
-		my $res=$bucket->delete_key($key);
-		if ($res && $key=~/(^|\/)index.$config{htmlext}$/) {
-			# index.html special case: Delete other file too
-			$key=~s/index.$config{htmlext}$//;
-			$res=$bucket->delete_key($key);
-		}
-		if (! $res) {
-			error(gettext("Failed to delete file from S3: ").
-				$bucket->err.": ".$bucket->errstr."\n");
+
+		foreach my $key (@keys) {
+			my $res=$bucket->delete_key($key);
+			if (! $res) {
+				error(gettext("Failed to delete file from S3: ").
+					$bucket->err.": ".$bucket->errstr."\n");
+			}
 		}
 	}
 
 	return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file);
-} #}}}
+}
 
 1