Subject: | Limit rate in Net::Amazon::S3 |
Kostas Chatzikokolakis <kostas@chatzi.org> says:
I'm using Net::Amazon::S3 in my projects, thanks a lot for this nice
package.
Yesterday I was having some issues with Amazon S3. For some reason, when
uploading big files (using add_key_filename) Amazon was dropping the
connection sending only a "500 EOF" response. After some experiments I
found that limiting the upload speed to about 700KBytes - 1MByte per
second totally solved the problem! So I added a "limit_rate" option to
Net::Amazon::S3 that allows to set a maximum upload rate when using
add_key_filename. I do that by checking the elapsed time every 100KBytes
and sleeping if uploading faster than we should.
I attach the patch in case you or somebody else has the same problem.
Subject: | limit_rate.patch |
diff -Naur Net-Amazon-S3-0.41/lib/Net/Amazon/S3/Bucket.pm Net-Amazon-S3-0.41-new/lib/Net/Amazon/S3/Bucket.pm
--- Net-Amazon-S3-0.41/lib/Net/Amazon/S3/Bucket.pm 2007-11-30 10:12:42.000000000 +0100
+++ Net-Amazon-S3-0.41-new/lib/Net/Amazon/S3/Bucket.pm 2008-02-05 07:54:59.000000000 +0100
@@ -121,7 +121,7 @@
if ( ref($value) eq 'SCALAR' ) {
$conf->{'Content-Length'} ||= -s $$value;
- $value = _content_sub($$value);
+ $value = $self->_content_sub($$value);
} else {
$conf->{'Content-Length'} ||= length $value;
}
@@ -453,25 +453,31 @@
sub errstr { $_[0]->account->errstr }
sub _content_sub {
- my $filename = shift;
+ my ($self, $filename) = @_;
my $stat = stat($filename);
- my $remaining = $stat->size;
+ my $total = $stat->size;
my $blksize = $stat->blksize || 4096;
croak "$filename not a readable file with fixed size"
- unless -r $filename and $remaining;
+ unless -r $filename and $total;
open DATA, "< $filename" or croak "Could not open $filename: $!";
+ my $limit = $self->account->limit_rate;
+ my $sent = 0;
+ my $next_check = 102400;
+ my $start_time = time;
+
return sub {
my $buffer;
- # warn "read remaining $remaining";
- unless ( my $read = read( DATA, $buffer, $blksize ) ) {
+ # warn "read sent $sent";
+ my $read;
+ unless ( $read = read( DATA, $buffer, $blksize ) ) {
-# warn "read $read buffer $buffer remaining $remaining";
+# warn "read $read buffer $buffer sent $sent";
croak
- "Error while reading upload content $filename ($remaining remaining) $!"
- if $! and $remaining;
+ "Error while reading upload content $filename ($sent sent) $!"
+ if $! && $sent < $total;
# otherwise, we found EOF
close DATA
@@ -479,7 +485,19 @@
$buffer ||= ''
; # LWP expects an emptry string on finish, read returns 0
}
- $remaining -= length($buffer);
+ $sent += $read;
+
+            # If a rate limit is set, check the elapsed time every 100 KBytes.
+            # If the elapsed time is less than what the limit imposes, sleep to cover the difference.
+            #
+ if($limit && $sent > $next_check) {
+ my $elapsed = time - $start_time;
+ my $min = int($sent / $limit);
+ sleep $min - $elapsed if $elapsed < $min;
+
+ $next_check += 102400; # next check after another 100 KBytes
+ }
+
return $buffer;
};
}
diff -Naur Net-Amazon-S3-0.41/lib/Net/Amazon/S3.pm Net-Amazon-S3-0.41-new/lib/Net/Amazon/S3.pm
--- Net-Amazon-S3-0.41/lib/Net/Amazon/S3.pm 2007-11-30 11:42:51.000000000 +0100
+++ Net-Amazon-S3-0.41-new/lib/Net/Amazon/S3.pm 2008-02-05 07:54:36.000000000 +0100
@@ -109,7 +109,7 @@
use base qw(Class::Accessor::Fast);
__PACKAGE__->mk_accessors(
- qw(libxml aws_access_key_id aws_secret_access_key secure ua err errstr timeout)
+ qw(libxml aws_access_key_id aws_secret_access_key secure ua err errstr timeout limit_rate)
);
our $VERSION = '0.41';