Upload a file to Amazon S3 with NodeJS

Posted 2019-01-16 03:21

I ran into a problem while trying to upload a file to my S3 bucket. Everything works except that the file parameter does not seem to be in the right form. I am using the AWS SDK for JavaScript to upload from Node.js to S3.

This is my route setup:

var multiparty = require('connect-multiparty'),
    multipartyMiddleware = multiparty();
app.route('/api/items/upload').post(multipartyMiddleware, items.upload);

This is the items.upload() function:

exports.upload = function(req, res) {
    var file = req.files.file;
    var s3bucket = new AWS.S3({params: {Bucket: 'mybucketname'}});
    s3bucket.createBucket(function() {
        var params = {
            Key: file.name,
            Body: file
        };
        s3bucket.upload(params, function(err, data) {
            console.log("PRINT FILE:", file);
            if (err) {
                console.log('ERROR MSG: ', err);
            } else {
                console.log('Successfully uploaded data');
            }
        });
    });
};

Setting the Body param to a string like "hello" works fine. According to the docs, the Body param must be a Buffer, Typed Array, Blob, String, or ReadableStream. However, uploading a file object fails with the following error message:

[Error: Unsupported body payload object]

This is the file object:

{ fieldName: 'file',
  originalFilename: 'second_fnp.png',
  path: '/var/folders/ps/l8lvygws0w93trqz7yj1t5sr0000gn/T/26374-7ttwvc.png',
  headers: 
   { 'content-disposition': 'form-data; name="file"; filename="second_fnp.png"',
     'content-type': 'image/png' },
  ws: 
   { _writableState: 
      { highWaterMark: 16384,
        objectMode: false,
        needDrain: true,
        ending: true,
        ended: true,
        finished: true,
        decodeStrings: true,
        defaultEncoding: 'utf8',
        length: 0,
        writing: false,
        sync: false,
        bufferProcessing: false,
        onwrite: [Function],
        writecb: null,
        writelen: 0,
        buffer: [],
        errorEmitted: false },
     writable: true,
     domain: null,
     _events: { error: [Object], close: [Object] },
     _maxListeners: 10,
     path: '/var/folders/ps/l8lvygws0w93trqz7yj1t5sr0000gn/T/26374-7ttwvc.png',
     fd: null,
     flags: 'w',
     mode: 438,
     start: undefined,
     pos: undefined,
     bytesWritten: 261937,
     closed: true },
  size: 261937,
  name: 'second_fnp.png',
  type: 'image/png' }
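For reference, if I read the docs right, any of these should be acceptable Body values (a quick sketch, not my actual handler; '/tmp/some.png' is just a stand-in path):

var AWS = require('aws-sdk');
var fs = require('fs');
var s3bucket = new AWS.S3({params: {Bucket: 'mybucketname'}});

// A String works (as noted above):
s3bucket.upload({Key: 'test-string', Body: 'hello'}, console.log);

// A Buffer should also be accepted:
s3bucket.upload({Key: 'test-buffer', Body: new Buffer('hello')}, console.log);

// As should a ReadableStream:
s3bucket.upload({Key: 'test-stream', Body: fs.createReadStream('/tmp/some.png')}, console.log);

So I don't understand why the multiparty file object above is rejected.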

Any help will be greatly appreciated!

4 Answers
甜甜的少女心
#2 · 2019-01-16 04:01
var express = require('express');
var app = module.exports = express();
var server = require('http').createServer(app);
server.listen(3001);

var aws = require('aws-sdk');
var multer = require('multer');
var multerS3 = require('multer-s3');

aws.config.update({
    secretAccessKey: "XXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
    accessKeyId: "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
    region: 'us-east-1'
});
var s3 = new aws.S3();

var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: "Your bucket name",
        key: function (req, file, cb) {
            console.log(file);
            // Use Date.now() for unique file keys
            cb(null, "uploads/profile_images/u_" + Date.now() + ".jpg");
        }
    })
});

app.post('/upload', upload.single('photos'), function (req, res, next) {
    // req.file is a single file object (not an array), added by multer
    console.log('Successfully uploaded ', req.file);
    res.send('Successfully uploaded ' + req.file.originalname + '!');
});
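Assuming the server above is listening on port 3001, you can exercise the route with curl -F "photos=@/path/to/photo.jpg" http://localhost:3001/upload; note that the form field name has to match the 'photos' argument passed to upload.single().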
smile是对你的礼貌
#3 · 2019-01-16 04:03

Or, using promises:

const AWS = require('aws-sdk');

AWS.config.update({
  accessKeyId: 'accessKeyId',
  secretAccessKey: 'secretAccessKey',
  region: 'region'
});

// await is only valid inside an async function
async function uploadToS3() {
  const params = {
    Bucket: 'yourBucketName',
    Key: 'someUniqueKey',
    Body: 'someFile'
  };
  try {
    await new AWS.S3().putObject(params).promise();
    console.log('Successfully uploaded data to bucket');
  } catch (e) {
    console.log('Error uploading data: ', e);
  }
}
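Note that Body: 'someFile' above is just a placeholder string; for an actual file you would pass a Buffer or a stream. A minimal sketch, assuming a local file ./photo.png (upload() is used here because it accepts streams and switches to multipart for large bodies):

const AWS = require('aws-sdk');
const fs = require('fs');

async function uploadFile(bucket, key, filePath) {
  const params = {
    Bucket: bucket,
    Key: key,
    Body: fs.createReadStream(filePath) // stream instead of buffering the whole file
  };
  // upload() accepts streams and manages multipart uploads itself
  return new AWS.S3().upload(params).promise();
}

uploadFile('yourBucketName', 'someUniqueKey', './photo.png')
  .then(() => console.log('Successfully uploaded data to bucket'))
  .catch((e) => console.log('Error uploading data: ', e));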
趁早两清
#4 · 2019-01-16 04:09

So it looks like there are a few things going wrong here. Based on your post, it looks like you are attempting to support file uploads using the connect-multiparty middleware. What this middleware does is take the uploaded file, write it to the local filesystem, and then set req.files to the uploaded file(s).

The configuration of your route looks fine; the problem is in your items.upload() function, in particular this part:

var params = {
  Key: file.name,
  Body: file
};

As I mentioned at the beginning of my answer, connect-multiparty writes the file to the local filesystem, so you'll need to open and read the file, upload it, and then delete it from the local filesystem.

That said, you could update your method to something like the following:

var fs = require('fs');
exports.upload = function (req, res) {
    var file = req.files.file;
    fs.readFile(file.path, function (err, data) {
        if (err) throw err; // Something went wrong!
        var s3bucket = new AWS.S3({params: {Bucket: 'mybucketname'}});
        s3bucket.createBucket(function () {
            var params = {
                Key: file.originalFilename, //file.name doesn't exist as a property
                Body: data
            };
            s3bucket.upload(params, function (err, data) {
                // Whether there is an error or not, delete the temp file
                fs.unlink(file.path, function (err) {
                    if (err) {
                        console.error(err);
                    } else {
                        console.log('Temp file deleted');
                    }
                });

                console.log("PRINT FILE:", file);
                if (err) {
                    console.log('ERROR MSG: ', err);
                    res.status(500).send(err);
                } else {
                    console.log('Successfully uploaded data');
                    res.status(200).end();
                }
            });
        });
    });
};

What this does is read the uploaded file from the local filesystem, upload it to S3, delete the temporary file, and send a response.

There are a few problems with this approach. First, it's not as efficient as it could be: for large files, you load the entire file into memory before you write it. Second, it doesn't support multipart uploads for large files (a single PUT is capped at 5 GB, and AWS recommends switching to multipart uploads above roughly 100 MB).
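For what it's worth, the plain SDK can mitigate both problems: s3.upload() accepts a ReadableStream for Body and manages multipart uploads internally. A minimal sketch, reusing the same req.files.file shape as above:

var fs = require('fs');
var AWS = require('aws-sdk');

exports.upload = function (req, res) {
    var file = req.files.file;
    // upload() streams the body and switches to multipart for large files
    new AWS.S3().upload({
        Bucket: 'mybucketname',
        Key: file.originalFilename,
        Body: fs.createReadStream(file.path)
    }, function (err, data) {
        if (err) return res.status(500).send(err);
        res.status(200).end();
    });
};

You would still want to unlink the temp file in the callback, as in the example above.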

Beyond that, what I would suggest is a module I've been working on called S3FS, which provides an interface similar to the native fs module in Node.js but abstracts away details such as multipart uploads and the S3 API (and adds extra functionality like recursive methods).

If you were to pull in the S3FS library, your code would look something like this:

var fs = require('fs'),
    S3FS = require('s3fs'),
    s3fsImpl = new S3FS('mybucketname', {
        accessKeyId: 'XXXXXXXXXXX',
        secretAccessKey: 'XXXXXXXXXXXXXXXXX'
    });

// Create our bucket if it doesn't exist
s3fsImpl.create();

exports.upload = function (req, res) {
    var file = req.files.file;
    var stream = fs.createReadStream(file.path);
    return s3fsImpl.writeFile(file.originalFilename, stream).then(function () {
        fs.unlink(file.path, function (err) {
            if (err) {
                console.error(err);
            }
        });
        res.status(200).end();
    });
};

What this will do is instantiate the module for the provided bucket and AWS credentials, then create the bucket if it doesn't exist. When a request comes in to upload a file, we open a stream to the file and use it to write the file to S3 at the specified path. This handles the multipart upload piece behind the scenes (if needed), and because it's done through a stream, you don't have to read the whole file into memory before you start uploading it.

If you prefer, you could change the code to use callbacks instead of promises, or use the pipe() method with event listeners to detect completion and errors.

Check out the documentation for s3fs if you're looking for additional methods, and feel free to open an issue if something is missing or not working.

#5 · 2019-01-16 04:09

I found the following to be a working solution:

npm install aws-sdk


Once you've installed the aws-sdk, use the following code, replacing the values with your own where needed.

var AWS = require('aws-sdk');
var fs = require('fs');

var s3 = new AWS.S3();

// Bucket names must be unique across all S3 users
var myBucket = 'njera';
var myKey = 'jpeg'; // the object key the file will be stored under

// For a text file:  fs.readFile('demo.txt', function (err, data) { ...
// For a video file: fs.readFile('demo.avi', function (err, data) { ...
// For an image file:
fs.readFile('demo.jpg', function (err, data) {
    if (err) { throw err; }

    var params = {Bucket: myBucket, Key: myKey, Body: data};

    s3.putObject(params, function (err, data) {
        if (err) {
            console.log(err);
        } else {
            console.log("Successfully uploaded data to myBucket/myKey");
        }
    });
});
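To sanity-check that the object actually landed, you can read it back with getObject (same bucket and key as above):

// Fetch the object we just wrote and report its size and type
s3.getObject({Bucket: myBucket, Key: myKey}, function (err, data) {
    if (err) { throw err; }
    console.log('Fetched ' + data.Body.length + ' bytes of type ' + data.ContentType);
});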

I found a complete tutorial on the subject here, in case you're looking for a reference:

How to upload files (text/image/video) in amazon s3 using node.js
