I want to download some image files from an S3 bucket to my local system using Promises in Node.js.
// Request descriptor for S3.getObject. The original had a stray quote
// (`bucket_name'`) that made this a syntax error.
var params = {
  Bucket: 'bucket_name',
  Key: 'key'
};
// Local destination for the downloaded object bytes.
// NOTE(review): assumes `fs` is required elsewhere and that the
// 'path/to' directory already exists — confirm before use.
var fileStream = fs.createWriteStream('path/to/file.jpg');
I tried this, which is working:
// createReadStream is a method and must be invoked — piping the bare
// function reference (as the original did) throws at runtime.
s3.getObject(params).createReadStream().pipe(fileStream);
But I want my code to look like this:
return s3.getObject(params).promise()
.then(function(data) {
//console.log(data.Body);
// No idea about this section
})
.catch(function(err) {
throw err;
});
I have to use Promises to ensure that all images are downloaded.
One possible solution is to use bluebird
and create a function that returns a promise on the end of the stream:
// Bluebird is unnecessary here: `Promise.defer()` is a deprecated
// anti-pattern, and a native Promise can wrap the stream events
// directly. The original also ignored its `object` argument and always
// read the outer `params` — fixed to use the argument. The destination
// stream is now an optional parameter that defaults to the outer
// `fileStream`, keeping existing callers working.
function downloadFromS3(object, destination = fileStream) {
  return new Promise((resolve, reject) => {
    const stream = s3.getObject(object).createReadStream();
    // Wire the listeners before piping so an immediate failure is caught.
    stream.on('error', reject);
    stream.on('end', () => resolve());
    stream.pipe(destination);
  });
}
downloadFromS3(params)
  .then(() => console.log('finished'))
  .catch(() => console.log('failed'));
Not sure if this code specifically would work, but it may give you a direction to look into.
// Declare the binding — the original assignment without a keyword
// created an implicit global (a ReferenceError in strict mode).
var streamToPromise = require('stream-to-promise');
var fileStream = fs.createWriteStream('path/to/file.jpg');
// Resolves once the write stream emits 'finish', i.e. the image is on disk.
streamToPromise(fileStream).then(function () {
  console.log('Image saved to file.');
});
// createReadStream must be invoked — the original piped the bare
// function reference, which throws at runtime.
s3.getObject(params).createReadStream().pipe(fileStream);
Here's a native promise solution with error detection on the read stream and on the write stream.
/**
 * Promisify stream completion: resolves with the name of the event that
 * finished the stream ('end' for readables, 'finish' for writables) and
 * rejects with the first 'error' emitted.
 *
 * @param {NodeJS.EventEmitter} stream - any readable or writable stream
 * @returns {Promise<string>} resolves to 'end' or 'finish'
 */
function streamPromise(stream) {
  return new Promise((resolve, reject) => {
    stream.on('end', () => resolve('end'));
    stream.on('finish', () => resolve('finish'));
    stream.on('error', reject);
  });
}
/**
 * Download s3://srcBucket/srcKey to outputPath on the local disk.
 * Read-side failures are logged and forwarded onto the write stream so
 * that the single returned promise reports either outcome.
 *
 * @param {string} srcBucket - source S3 bucket name
 * @param {string} srcKey - source object key
 * @param {string} outputPath - local file path to write to
 * @returns {Promise<string>} resolves when the write stream finishes
 */
async function s3Download(srcBucket, srcKey, outputPath) {
  const request = s3.getObject({ Bucket: srcBucket, Key: srcKey });
  const writer = fs.createWriteStream(outputPath);
  const reader = request.createReadStream();
  reader.on('error', (err) => {
    console.warn('s3download error', err);
    // Re-emit on the writer so streamPromise(writer) rejects too.
    writer.emit("error", err);
  });
  reader.pipe(writer);
  return streamPromise(writer);
}
Here is a snippet to use async/await with NodeJS 8:
// Example for Node.js 8+. NOTE(review): the `await` expressions below are
// only valid inside an `async` function — this fragment assumes such a
// wrapper; top-level await is not available in a require()-based script.
const AWS = require('aws-sdk');
const fs = require('fs-extra');
const decompress = require('decompress');
const s3 = new AWS.S3();
// NOTE(review): `s3Location` must be supplied by the surrounding code —
// it is not defined in this snippet.
const s3Params = {
  Bucket: s3Location.bucketName,
  Key: s3Location.objectKey,
};
// Fetch the whole object into memory; `Body` is written out as one buffer.
const s3Object = await s3.getObject(s3Params).promise();
await fs.writeFile('myfile.zip', s3Object.Body);
await decompress('myfile.zip', 'myFileDir');
/* The compressed file is retrieved as "myfile.zip".
Content will be extracted in myFileDir directory */