Skip to content

Commit

Permalink
Merge pull request #79 from tomlagier/issues/78-batching-file-uploads
Browse files Browse the repository at this point in the history
Adds support for batching requests to S3
  • Loading branch information
lukemelia authored Dec 22, 2016
2 parents c9716b7 + dd6466d commit 51f3d54
Show file tree
Hide file tree
Showing 5 changed files with 198 additions and 48 deletions.
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,14 @@ The Server-side encryption algorithm used when storing this object in S3 (e.g.,
- "AES256"
- "aws:kms"

### batchSize

If `batchSize` is set to a value greater than 0, S3 `putObject` requests are performed in sequential batches of at most `batchSize` files each.

Useful when deploying to [fake-s3](https://github.com/jubos/fake-s3/), or when deploying applications with enough files to trigger S3 rate limits (very uncommon).

*Default:* `0`

## Prerequisites

The following properties are expected to be present on the deployment `context` object:
Expand Down
7 changes: 5 additions & 2 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ module.exports = {
cacheControl: 'max-age='+TWO_YEAR_CACHE_PERIOD_IN_SEC+', public',
expires: EXPIRE_IN_2030,
dotFolders: false,
batchSize: 0,
distDir: function(context) {
return context.distDir;
},
Expand All @@ -44,7 +45,7 @@ module.exports = {
},
requiredConfig: ['bucket', 'region'],

upload: function(context) {
upload: function() {
var self = this;

var filePattern = this.readConfig('filePattern');
Expand All @@ -59,6 +60,7 @@ module.exports = {
var expires = this.readConfig('expires');
var dotFolders = this.readConfig('dotFolders');
var serverSideEncryption = this.readConfig('serverSideEncryption');
var batchSize = this.readConfig('batchSize');

var filesToUpload = distFiles.filter(minimatch.filter(filePattern, { matchBase: true, dot: dotFolders }));

Expand All @@ -75,7 +77,8 @@ module.exports = {
acl: acl,
manifestPath: manifestPath,
cacheControl: cacheControl,
expires: expires
expires: expires,
batchSize: batchSize
};

if (serverSideEncryption) {
Expand Down
119 changes: 74 additions & 45 deletions lib/s3.js
Original file line number Diff line number Diff line change
Expand Up @@ -51,12 +51,12 @@ module.exports = CoreObject.extend({
upload: function(options) {
options = options || {};
return this._determineFilePaths(options).then(function(filePaths) {
const allFilesUploaded = Promise.all(this._putObjects(filePaths, options));
const allFilesUploaded = this._putObjects(filePaths, options);

const manifestPath = options.manifestPath;
if (manifestPath) {
return allFilesUploaded.then(function(filesUploaded) {
return Promise.all(this._putObjects([manifestPath], options)).then(function(manifestUploaded) {
return this._putObject(manifestPath, options).then(function(manifestUploaded) {
return filesUploaded.concat(manifestUploaded);
});
}.bind(this));
Expand Down Expand Up @@ -98,7 +98,7 @@ module.exports = CoreObject.extend({
}
},

_putObjects: function(filePaths, options) {
_putObject: function(filePath, options, filePaths) {
var plugin = this._plugin;
var cwd = options.cwd;
var bucket = options.bucket;
Expand All @@ -111,55 +111,84 @@ module.exports = CoreObject.extend({

mime.default_type = options.defaultMimeType || mime.lookup('bin');

return filePaths.map(function(filePath) {
var basePath = path.join(cwd, filePath);
var data = fs.readFileSync(basePath);
var contentType = mime.lookup(basePath);
var encoding = mime.charsets.lookup(contentType);
var key = prefix === '' ? filePath : [prefix, filePath].join('/');
var isGzipped = gzippedFilePaths.indexOf(filePath) !== -1;

if (isGzipped && path.extname(basePath) === '.gz') {
var basePathUncompressed = path.basename(basePath, '.gz');
if (filePaths.indexOf(basePathUncompressed) !== -1) {
contentType = mime.lookup(basePathUncompressed);
encoding = mime.charsets.lookup(contentType);
}
var basePath = path.join(cwd, filePath);
var data = fs.readFileSync(basePath);
var contentType = mime.lookup(basePath);
var encoding = mime.charsets.lookup(contentType);
var key = prefix === '' ? filePath : [prefix, filePath].join('/');
var isGzipped = gzippedFilePaths.indexOf(filePath) !== -1;

if (isGzipped && path.extname(basePath) === '.gz') {
var basePathUncompressed = path.basename(basePath, '.gz');
if (filePaths && filePaths.indexOf(basePathUncompressed) !== -1) {
contentType = mime.lookup(basePathUncompressed);
encoding = mime.charsets.lookup(contentType);
}
}

if (encoding) {
contentType += '; charset=';
contentType += encoding.toLowerCase();
}
if (encoding) {
contentType += '; charset=';
contentType += encoding.toLowerCase();
}

var params = {
Bucket: bucket,
ACL: acl,
Body: data,
ContentType: contentType,
Key: key,
CacheControl: cacheControl,
Expires: expires
};
var params = {
Bucket: bucket,
ACL: acl,
Body: data,
ContentType: contentType,
Key: key,
CacheControl: cacheControl,
Expires: expires
};

if (serverSideEncryption) {
params.ServerSideEncryption = serverSideEncryption;
}
if (serverSideEncryption) {
params.ServerSideEncryption = serverSideEncryption;
}

if (isGzipped) {
params.ContentEncoding = 'gzip';
}

if (isGzipped) {
params.ContentEncoding = 'gzip';
return new Promise(function(resolve, reject) {
this._client.putObject(params, function(error) {
if (error) {
reject(error);
} else {
plugin.log('✔ ' + key, { verbose: true });
resolve(filePath);
}
});
}.bind(this));
},

_currentEnd: 0,
_putObjectsBatch: function(filePaths, options) {
var currentBatch = filePaths.slice(this._currentEnd, Math.min(this._currentEnd + options.batchSize, filePaths.length));

this._currentEnd += currentBatch.length;

//Execute our current batch of promises
return Promise.all(currentBatch.map(function (filePath) {
return this._putObject(filePath, options, filePaths);
}.bind(this)))
//Then check if we need to execute another batch
.then(function () {
if (this._currentEnd < filePaths.length) {
return this._putObjectsBatch(filePaths, options);
}

return new Promise(function(resolve, reject) {
this._client.putObject(params, function(error, data) {
if (error) {
reject(error);
} else {
plugin.log('✔ ' + key, { verbose: true });
resolve(filePath);
}
});
}.bind(this));
return filePaths;
}.bind(this));
},

_putObjects: function (filePaths, options) {
if (options.batchSize > 0) {
this._currentEnd = 0;
return this._putObjectsBatch(filePaths, options);
}

return Promise.all(filePaths.map(function (filePath) {
return this._putObject(filePath, options, filePaths);
}.bind(this)));
}
});
2 changes: 1 addition & 1 deletion tests/unit/index-nodetest.js
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ describe('s3 plugin', function() {
return previous;
}, []);

assert.equal(messages.length, 5);
assert.equal(messages.length, 6);
});

describe('required config', function() {
Expand Down
110 changes: 110 additions & 0 deletions tests/unit/lib/s3-nodetest.js
Original file line number Diff line number Diff line change
Expand Up @@ -326,5 +326,115 @@ describe('s3', function() {
});
});
});

describe('with an integer batchSize specified', function () {
it('uploads all files', function () {
  var options = {
    filePaths: ['app.js', 'app.css'],
    cwd: process.cwd() + '/tests/fixtures/dist',
    prefix: 'js-app',
    batchSize: 10
  };

  var promises = subject.upload(options);

  return assert.isFulfilled(promises)
    .then(function() {
      assert.equal(mockUi.messages.length, 2);

      // Collect the per-file upload log lines. Uses the alternation
      // `(js|css)`; the previous `[js|css]` was a character class
      // matching any single one of j/s/|/c, not the intended
      // extension alternation.
      var messages = mockUi.messages.reduce(function(previous, current) {
        if (/- js-app\/app\.(js|css)/.test(current)) {
          previous.push(current);
        }

        return previous;
      }, []);

      assert.equal(messages.length, 2);
    });
});

it('returns a promise with an array of the files uploaded', function() {
  // Stub the client so no real request is made; resolve immediately.
  // (The previous version captured `params` into an unused `s3Params`
  // local — removed.)
  s3Client.putObject = function(params, cb) {
    cb();
  };

  var options = {
    filePaths: ['app.js', 'app.css'],
    cwd: process.cwd() + '/tests/fixtures/dist',
    prefix: 'js-app',
    batchSize: 10
  };

  var promises = subject.upload(options);

  return assert.isFulfilled(promises)
    .then(function(filesUploaded) {
      assert.deepEqual(filesUploaded, ['app.js', 'app.css']);
    });
});

it('uploads the correct number of batches', function () {
  var requests = 0;

  // Count putObject calls; resolve each one immediately.
  s3Client.putObject = function (params, cb) {
    requests++;
    cb();
  };

  var oldPutObjectsBatch = subject._putObjectsBatch.bind(subject);
  var called = false;

  // Spy on _putObjectsBatch: when it is re-entered for the second
  // batch, exactly one batch (batchSize = 2 requests) must have run.
  subject._putObjectsBatch = function (paths, options) {
    if (!called) {
      called = true;
      return oldPutObjectsBatch(paths, options);
    }

    assert.equal(requests, 2);

    subject._putObjectsBatch = oldPutObjectsBatch;
    return subject._putObjectsBatch(paths, options);
  };

  var options = {
    filePaths: ['app.js', 'app.css', 'app.css.gz', 'manifest.txt'],
    cwd: process.cwd() + '/tests/fixtures/dist',
    prefix: 'js-app',
    batchSize: 2
  };

  // Return the promise and let mocha handle resolution/rejection.
  // The previous version both declared `done` and returned a promise,
  // which mocha 3+ rejects ("Resolution method is overspecified").
  return subject.upload(options)
    .then(function () {
      assert.equal(requests, 4);
    });
});

it('rejects if an upload fails', function () {
  // Force every putObject call to fail.
  s3Client.putObject = function(params, cb) {
    cb('error uploading');
  };

  var options = {
    filePaths: ['app.js', 'app.css'],
    cwd: process.cwd() + '/tests/fixtures/dist',
    prefix: 'js-app',
    batchSize: 10
  };

  var promises = subject.upload(options);

  // Return the chai-as-promised assertion directly; the previous
  // version both declared `done` and returned a promise, which mocha
  // 3+ rejects as overspecified.
  return assert.isRejected(promises);
});
});
});
});

0 comments on commit 51f3d54

Please sign in to comment.