#!/usr/bin/perl
package IkiWiki::Plugin::amazon_s3;

use warnings;
no warnings 'redefine';
use strict;
use IkiWiki 3.00;
use IkiWiki::Render;
use Net::Amazon::S3;
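# 'redefine' warnings are disabled above because this plugin deliberately
# replaces IkiWiki::writefile and IkiWiki::prune with S3-aware wrappers,
# defined at the bottom of this file.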
# Store references to real subs before overriding them.
our %subs;
BEGIN {
	foreach my $sub (qw{IkiWiki::writefile IkiWiki::prune}) {
		$subs{$sub}=\&$sub;
	}
};
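# Only writefile and prune are wrapped, so the local destdir remains the
# canonical copy of the wiki; S3 mirrors whatever is written to or pruned
# from it.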
sub import {
	hook(type => "getopt", id => "amazon_s3", call => \&getopt);
	hook(type => "getsetup", id => "amazon_s3", call => \&getsetup);
	hook(type => "checkconfig", id => "amazon_s3", call => \&checkconfig);
}
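# The getopt hook adds a --delete-bucket switch to ikiwiki, which empties
# and then removes the configured S3 bucket.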
sub getopt () {
	eval q{use Getopt::Long};
	error($@) if $@;
	Getopt::Long::Configure('pass_through');
	GetOptions("delete-bucket" => sub {
		my $bucket=getbucket();
		debug(gettext("deleting bucket.."));
		my $resp = $bucket->list_all or die $bucket->err . ": " . $bucket->errstr;
		foreach my $key (@{$resp->{keys}}) {
			debug("\t".$key->{key});
			$bucket->delete_key($key->{key}) or die $bucket->err . ": " . $bucket->errstr;
		}
		$bucket->delete_bucket or die $bucket->err . ": " . $bucket->errstr;
		debug(gettext("done"));
		exit(0);
	});
}
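# Describe the plugin's configuration settings, for the setup file and
# websetup.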
sub getsetup () {
	return
		plugin => {
			safe => 0,
			rebuild => 0,
		},
		amazon_s3_key_id => {
			type => "string",
			example => "XXXXXXXXXXXXXXXXXXXX",
			description => "public access key id",
			safe => 1,
			rebuild => 0,
		},
		amazon_s3_key_file => {
			type => "string",
			example => "$ENV{HOME}/.s3_key",
			description => "file holding secret key (must not be readable by others!)",
			safe => 0, # ikiwiki reads this file
			rebuild => 0,
		},
		amazon_s3_bucket => {
			type => "string",
			example => "mywiki",
			description => "globally unique name of bucket to store wiki in",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_prefix => {
			type => "string",
			example => "wiki/",
			description => "a prefix to prepend to each page name",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_location => {
			type => "string",
			example => "EU",
			description => "which S3 datacenter to use (leave blank for default)",
			safe => 1,
			rebuild => 1,
		},
		amazon_s3_dupindex => {
			type => "boolean",
			example => 0,
			description => "store each index file twice? (allows urls ending in \"/index.html\" and \"/\")",
			safe => 1,
			rebuild => 1,
		},
}
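# Check that the required settings are present, and default the key
# prefix to "wiki/".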
sub checkconfig {
	foreach my $field (qw{amazon_s3_key_id amazon_s3_key_file
	                      amazon_s3_bucket}) {
		if (! exists $config{$field} || ! defined $config{$field}) {
			error(sprintf(gettext("Must specify %s"), $field));
		}
	}
	if (! exists $config{amazon_s3_prefix} ||
	    ! defined $config{amazon_s3_prefix}) {
		$config{amazon_s3_prefix}="wiki/";
	}
}
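# Returns the Net::Amazon::S3 bucket handle, creating the bucket if it
# does not yet exist. The handle is cached in a closure so the S3
# connection is only set up once per run.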
{
my $bucket;
sub getbucket {
	return $bucket if defined $bucket;

	open(IN, "<", $config{amazon_s3_key_file}) || error($config{amazon_s3_key_file}.": ".$!);
	my $key=<IN>;
	chomp $key;
	close IN;

	my $s3=Net::Amazon::S3->new({
		aws_access_key_id => $config{amazon_s3_key_id},
		aws_secret_access_key => $key,
	});

	# make sure the bucket exists
	if (exists $config{amazon_s3_location}) {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
			location_constraint => $config{amazon_s3_location},
		});
	}
	else {
		$bucket=$s3->add_bucket({
			bucket => $config{amazon_s3_bucket},
		});
	}

	if (! $bucket) {
		error(gettext("Failed to create S3 bucket: ").
			$s3->err.": ".$s3->errstr."\n");
	}

	return $bucket;
}
}
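# Keys are formed by replacing destdir with $config{amazon_s3_prefix};
# with usedirs enabled and the default prefix, destdir/foo/index.html
# ends up at the S3 key "wiki/foo/".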
# Given a file, return any S3 keys associated with it.
sub file2keys ($) {
	my $file=shift;

	my @keys;
	if ($file =~ /^\Q$config{destdir}\/\E(.*)/) {
		push @keys, $config{amazon_s3_prefix}.$1;

		# Munge foo/index.html to foo/
		if ($keys[0]=~/(^|.*\/)index.$config{htmlext}$/) {
			# A duplicate might need to be stored under the
			# unmunged name too.
			if (!$config{usedirs} || $config{amazon_s3_dupindex}) {
				push @keys, $1;
			}
			else {
				@keys=($1);
			}
		}
	}
	return @keys;
}
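# The remaining subs are defined inside the IkiWiki package itself, so
# that they replace the stock writefile and prune used by the rest of
# ikiwiki.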
package IkiWiki;
use File::MimeInfo;
use Encode;

# This is a wrapper around the real writefile.
sub writefile ($$$;$$) {
	my $file=shift;
	my $destdir=shift;
	my $content=shift;
	my $binary=shift;
	my $writer=shift;

	# First, write the file to disk.
	my $ret=$IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::writefile'}->($file, $destdir, $content, $binary, $writer);

	my @keys=IkiWiki::Plugin::amazon_s3::file2keys("$destdir/$file");
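	# file2keys returns an empty list for files outside destdir, in which
	# case nothing is sent to S3.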
	# Store the data in S3.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		# The http layer tries to downgrade utf-8
		# content, but that can fail (see
		# http://rt.cpan.org/Ticket/Display.html?id=35710),
		# so force convert it to bytes.
		$content=encode_utf8($content) if defined $content;

		my %opts=(
			acl_short => 'public-read',
			content_type => mimetype("$destdir/$file"),
		);
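		# Every key is uploaded world-readable, so the wiki can be served
		# directly out of the bucket.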
		# If there are multiple keys to write, data is sent
		# multiple times.
		# TODO: investigate using the new copy operation.
		#       (It may not be robust enough.)
		foreach my $key (@keys) {
			my $res;
			if (! $writer) {
				$res=$bucket->add_key($key, $content, \%opts);
			}
			else {
				# This test for empty files is a workaround
				# for this bug:
				# http://rt.cpan.org//Ticket/Display.html?id=35731
				if (-z "$destdir/$file") {
					$res=$bucket->add_key($key, "", \%opts);
				}
				else {
					# read back in the file that the writer emitted
					$res=$bucket->add_key_filename($key, "$destdir/$file", \%opts);
				}
			}
			if (! $res) {
				error(gettext("Failed to save file to S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
			}
		}
	}

	return $ret;
}
# This is a wrapper around the real prune.
sub prune ($) {
	my $file=shift;

	my @keys=IkiWiki::Plugin::amazon_s3::file2keys($file);

	# Prune files out of S3 too.
	if (@keys) {
		my $bucket=IkiWiki::Plugin::amazon_s3::getbucket();

		foreach my $key (@keys) {
			my $res=$bucket->delete_key($key);
			if (! $res) {
				error(gettext("Failed to delete file on S3: ").
					$bucket->err.": ".$bucket->errstr."\n");
			}
		}
	}

	return $IkiWiki::Plugin::amazon_s3::subs{'IkiWiki::prune'}->($file);
}

1