Streaming file from S3 with Express including information on length and filetype
One approach is to listen for the httpHeaders
event and create a stream within it.
// Stream the S3 object directly to the Express response, forwarding the
// length and type headers S3 reports. 'httpHeaders' fires once the S3
// response headers arrive, before any of the body is consumed.
s3.getObject(params)
  .on('httpHeaders', function (statusCode, headers) {
    if (statusCode >= 300) {
      // S3 answered with an error (e.g. a 404 XML body). Don't mirror
      // its headers onto an implicit 200 — propagate the status so the
      // client sees the failure.
      res.status(statusCode);
      return;
    }
    res.set('Content-Length', headers['content-length']);
    res.set('Content-Type', headers['content-type']);
    // `this` is the AWS.Request; stream the already-open HTTP response
    // body to the client without buffering it in memory.
    this.response.httpResponse.createUnbufferedStream()
      .pipe(res);
  })
  .send();
For my project, I simply do a headObject in order to retrieve the object metadata only (it's really fast and avoids downloading the object). Then I add to the response all the headers I need to propagate for the piping:
const s3 = new AWS.S3();
const params = { Bucket: bucket, Key: key };

// HEAD the object first: fast, downloads no body, and returns the
// metadata (type, length, mtime, ETag) to propagate on the response.
s3.headObject(params, function (err, data) {
  if (err) {
    // Object missing/inaccessible — log and deliberately fall through
    // to the next middleware (e.g. another storage backend).
    console.error(err);
    return next();
  }

  const stream = s3.getObject(params).createReadStream();

  // Forward stream errors. Once headers have been flushed we can no
  // longer delegate to Express routing — terminate the response instead
  // of calling next() (which would throw "headers already sent").
  stream.on('error', function error(err) {
    console.error(err);
    if (res.headersSent) {
      res.end();
      return;
    }
    // Continue to the next middlewares.
    return next();
  });

  // Prefer the Content-Type S3 stored with the object (headObject DOES
  // return it as data.ContentType); fall back to an extension lookup.
  res.set('Content-Type', data.ContentType || mime.lookup(key));
  res.set('Content-Length', data.ContentLength);
  res.set('Last-Modified', data.LastModified);
  res.set('ETag', data.ETag);

  stream.on('end', () => {
    console.log('Served by Amazon S3: ' + key);
  });

  // Pipe the S3 object body to the Express response.
  stream.pipe(res);
});
Building on André Werlang's answer, we have done the following to augment AWS Request
objects with a forwardToExpress
method:
const _ = require('lodash');
const AWS = require('aws-sdk');

/**
 * Pipe this AWS.Request's object body to an Express response, forwarding
 * the relevant headers S3 reports.
 *
 * NOTE: extends AWS.Request.prototype — register once at startup.
 *
 * @param {Object} res - Express response to stream into
 * @param {Function} next - Express `next`, invoked on stream error
 */
AWS.Request.prototype.forwardToExpress = function forwardToExpress(res, next) {
  this
    .on('httpHeaders', function (code, headers) {
      if (code < 300) {
        res.set(_.pick(headers, 'content-type', 'content-length', 'last-modified'));
      } else {
        // Propagate S3's error status instead of piping the error body
        // out under a misleading default 200.
        res.status(code);
      }
    })
    .createReadStream()
    .on('error', next)
    .pipe(res);
};
Then, in our route handlers, we can do something like this:
// Route handler: stream myFile from myBucket straight to the client,
// forwarding headers and reporting stream errors via next().
s3.getObject({Bucket: myBucket, Key: myFile}).forwardToExpress(res, next);