
I am at a loss as to what I am doing wrong; here is what I have:

HTML

<html>
<body>
    <form method="POST" action="/upload" enctype="multipart/form-data">
        <div class="field">
            <label for="image">Image Upload</label>
            <input type="file" name="image" id="image">
        </div>
        <input type="submit" class="btn" value="Save">
    </form>
</body>
</html>

Port 5000 is my Node.js server's port.

In this example I am using POST to /upload, and it works fine.

module.exports = function(app, models) {

    var fs = require('fs');
    var AWS = require('aws-sdk');
    var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
    var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });

    var s3 = new AWS.S3();

    app.post('/upload', function(req, res){

        var params = {
            Bucket: 'makersquest',
            Key: 'myKey1234.png',
            Body: "Hello"
        };

        s3.putObject(params, function (perr, pres) {
            if (perr) {
                console.log("Error uploading data: ", perr);
            } else {
                console.log("Successfully uploaded data to myBucket/myKey");
            }
        });
    });

}

Now I want to upload the actual file that I am POSTing, which is where the problem arises.

module.exports = function(app, models) {

    var fs = require('fs');
    var AWS = require('aws-sdk');
    var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
    var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

    AWS.config.update({
        accessKeyId: accessKeyId,
        secretAccessKey: secretAccessKey
    });

    var s3 = new AWS.S3();

    app.post('/upload', function(req, res){
        var path = req.files.image.path;
        fs.readFile(path, function(err, file_buffer){
            var params = {
                Bucket: 'makersquest',
                Key: 'myKey1234.png',
                Body: file_buffer
            };

            s3.putObject(params, function (perr, pres) {
                if (perr) {
                    console.log("Error uploading data: ", perr);
                } else {
                    console.log("Successfully uploaded data to myBucket/myKey");
                }
            });
        });
    });
}

The error I get is:

TypeError: Cannot read property 'path' of undefined

In fact, req.files is completely empty.

I am assuming I am missing something pretty obvious but I can't seem to find it.

abritez

5 Answers


You will need something like multer to handle the multipart upload. Here is an example streaming your file upload to S3 using the aws-sdk.

var multer = require('multer');
var AWS = require('aws-sdk');

var accessKeyId =  process.env.AWS_ACCESS_KEY || "xxxxxx";
var secretAccessKey = process.env.AWS_SECRET_KEY || "+xxxxxx+B+xxxxxxx";

AWS.config.update({
    accessKeyId: accessKeyId,
    secretAccessKey: secretAccessKey
});

var s3 = new AWS.S3();

app.use(multer({ // multer 0.x options (https://github.com/expressjs/multer); rename/onFileUploadData were removed in multer 1.x
  dest: './public/uploads/',
  limits: { fileSize: 100000 },
  rename: function (fieldname, filename) {
    return filename.replace(/\W+/g, '-').toLowerCase();
  },
  onFileUploadData: function (file, data, req, res) {
    // file : { fieldname, originalname, name, encoding, mimetype, path, extension, size, truncated, buffer }
    var params = {
      Bucket: 'makersquest',
      Key: file.name,
      Body: data
    };

    s3.putObject(params, function (perr, pres) {
      if (perr) {
        console.log("Error uploading data: ", perr);
      } else {
        console.log("Successfully uploaded data to myBucket/myKey");
      }
    });
  }
}));

app.post('/upload', function(req, res){
    if(req.files.image !== undefined){ // `image` is the field name from your form
        res.redirect("/uploads"); // success
    }else{
        res.send("error, no file chosen");
    }
});
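
One thing the snippet above does not do is clean up the temporary file multer writes under dest (also asked in the comment below). A minimal sketch of the extra option, assuming the same multer 0.x configuration and that fs is required at the top:

// add alongside onFileUploadData in the multer options above
onFileUploadComplete: function (file, req, res) {
    // the local copy can go as soon as multer has finished writing it;
    // the s3.putObject call above already has the data in memory
    fs.unlink(file.path, function (err) {
        if (err) console.log("Could not delete local file: ", err);
    });
}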
theRemix
    In this example, should I remove the local file afterwards, e.g. with `onFileUploadComplete: function (file, req, res) { fs.unlink(file.path) }`? – Eun Bit Hwang Jun 29 '15 at 14:24

Latest Answer @ Dec-2016 [New]

Use multer-s3 for multipart uploading to S3 without saving to local disk:

var express = require('express'),
    aws = require('aws-sdk'),
    bodyParser = require('body-parser'),
    multer = require('multer'),
    multerS3 = require('multer-s3');

aws.config.update({
    secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    accessKeyId: 'XXXXXXXXXXXXXXX',
    region: 'us-east-1'
});

var app = express(),
    s3 = new aws.S3();

app.use(bodyParser.json());

var upload = multer({
    storage: multerS3({
        s3: s3,
        bucket: 'bucket-name',
        key: function (req, file, cb) {
            console.log(file);
            cb(null, file.originalname); //use Date.now() for unique file keys
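            // e.g., for unique keys: cb(null, Date.now().toString() + '-' + file.originalname);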
        }
    })
});

//open in browser to see upload form
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

//use by upload form
app.post('/upload', upload.array('upl',1), function (req, res, next) {
    res.send("Uploaded!");
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});
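
If you need to know in the route whether the upload succeeded (as asked in the comments below), the multer middleware can also be invoked manually so its error reaches your handler. A minimal sketch reusing the upload instance above; the /success redirect is hypothetical:

app.post('/upload', function (req, res) {
    // calling the middleware manually surfaces any multer/multer-s3 error here
    upload.array('upl', 1)(req, res, function (err) {
        if (err) {
            return res.status(500).send("Upload failed: " + err.message);
        }
        // with multer-s3 each uploaded file gets a .location holding the S3 URL
        console.log(req.files[0].location);
        res.redirect('/success'); // hypothetical route
    });
});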

Latest Answer @ Mar-2016 [Old-One]

Edit 1: use multer@1.1.0 and multer-s3@1.4.1 for the following snippet:

var express = require('express'),
    bodyParser = require('body-parser'),
    multer = require('multer'),
    s3 = require('multer-s3');

var app = express();

app.use(bodyParser.json());

var upload = multer({
    storage: s3({
        dirname: '/',
        bucket: 'bucket-name',
        secretAccessKey: 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
        accessKeyId: 'XXXXXXXXXXXXXXX',
        region: 'us-east-1',
        filename: function (req, file, cb) {
            cb(null, file.originalname); //use Date.now() for unique file keys
        }
    })
});

//open in browser to see upload form
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

//use by upload form
app.post('/upload', upload.array('upl'), function (req, res, next) {
    res.send("Uploaded!");
});

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});

For a complete running example, clone the express_multer_s3 repo and run node app.

Zeeshan Hassan Memon
    getting errors with this code `default: throw new TypeError('Expected opts.s3 to be object') ^ TypeError: Expected opts.s3 to be object at new S3Storage (M:\workspace\Angular2StartKit\node_modules\multer-s3\index.js:66:20) at module.exports (M:\workspace\Angular2StartKit\node_modules\multer-s3\index.js:150:10)` – user2180794 Jun 19 '16 at 20:33
  • How do I know if the upload was successful or not, and then redirect based on that? Can you please show me how this is done? I am using `Multer-S3` – Illep Aug 13 '16 at 03:00
  • **Answer has been updated** @user2180794 hope both snippets work for you now – Zeeshan Hassan Memon Nov 30 '16 at 21:47
  • old but doesn't work for me (either of the 2). I get "Error: unexpected field". – Manuel Maestrini Aug 10 '17 at 09:44
  • @ManuelMaestrini did you follow the instructions properly or did you try it by cloning node-cheat? – Zeeshan Hassan Memon Aug 10 '17 at 10:49
  • I just followed the instructions and did .single as opposed to .array – Manuel Maestrini Aug 10 '17 at 11:12
  • I am using your code but this error comes up: "Expected opts.s3 to be object" – Nallasamy Nss May 10 '18 at 12:41
  • @NallasamyNss you must be using incompatible versions of the npm modules; see this node-cheat example and install the module versions mentioned there to make it work: https://github.com/zishon89us/node-cheat/blob/master/aws/express_multer_s3/app_es8.js – Zeeshan Hassan Memon May 10 '18 at 15:42

Simple S3 File Upload Without Multer

var express = require('express');
const fileUpload = require('express-fileupload');
var AWS = require('aws-sdk');

const app = express();

app.use(fileUpload());

app.post('/imageUpload', async (req, res) => {
    AWS.config.update({
        accessKeyId: "ACCESS-KEY", // Access key ID
        secretAccessKey: "SECRET-ACCESS-KEY", // Secret access key
        region: "us-east-1" // Region
    })

    const s3 = new AWS.S3();

    // express-fileupload puts the upload on req.files.<fieldname>;
    // 'uploadedFileName' is the form field name and .data is already a Buffer
    const fileContent = Buffer.from(req.files.uploadedFileName.data, 'binary');

    // Setting up S3 upload parameters
    const params = {
        Bucket: 'BUCKET-NAME',
        Key: "test.jpg", // File name you want to save as in S3
        Body: fileContent
    };

    // Uploading files to the bucket
    s3.upload(params, function(err, data) {
        if (err) {
            throw err;
        }
        res.send({
            "response_code": 200,
            "response_message": "Success",
            "response_data": data
        });
    });

})

app.listen(3000, function () {
    console.log('Example app listening on port 3000!');
});
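
If every upload should not overwrite test.jpg, the key can be derived from the uploaded file's own name. A small variation on the params above, assuming the same uploadedFileName field from express-fileupload:

    const uploadedFile = req.files.uploadedFileName;
    const params = {
        Bucket: 'BUCKET-NAME',
        Key: Date.now() + '-' + uploadedFile.name, // prefix the original name to keep keys unique
        Body: uploadedFile.data                    // express-fileupload already provides a Buffer
    };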
Surojit Paul

You need something like multer in your middleware stack to handle multipart/form-data for you and populate req.files. From the docs:

var express = require('express')
var multer  = require('multer')

var app = express()
app.use(multer({ dest: './uploads/'})) // multer 0.x API; in multer 1.x you would attach e.g. multer({ dest: './uploads/' }).single('image') to the route instead

Now req.files.image.path should be populated in your app.post function.
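
With that middleware in place, the route from the question should work roughly as originally written. A minimal sketch, assuming the fs, s3 and makersquest bucket setup from the question:

app.post('/upload', function (req, res) {
    fs.readFile(req.files.image.path, function (err, fileBuffer) {
        if (err) return res.status(500).send("Could not read uploaded file");
        s3.putObject({
            Bucket: 'makersquest',
            Key: req.files.image.name, // name multer 0.x assigned on disk
            Body: fileBuffer
        }, function (perr) {
            if (perr) return res.status(500).send("Error uploading data");
            res.send("Successfully uploaded " + req.files.image.originalname);
        });
    });
});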

morloch

This Stack Overflow answer was the best one I found explaining exactly how to get Node to S3 working:

AWS Missing credentials when i try send something to my S3 Bucket (Node.js)

I needed this, plus some more hacking, to get it all working. In my situation I was using a MEAN stack application, so the Node file I was working with was a route file.

My aconfig.json file with the Amazon credentials looks like this:

{ "accessKeyId": "*****YourAccessKey****", "secretAccessKey": "***YourSecretKey****" }

The final contents of the route file are pasted below.

router.post('/sendToS3', function(req, res) {

var fs = require('fs');
var multer = require('multer');
var AWS = require('aws-sdk');
var path = require('path');

var awsCredFile = path.join(__dirname, '.', 'aconfig.json');

console.log('awsCredFile is');
console.log(awsCredFile);

AWS.config.loadFromPath(awsCredFile);

var s3 = new AWS.S3(); // not actually used below; photoBucket does the uploading

var photoBucket = new AWS.S3({params: {Bucket: 'myGreatBucketName'}});

var sampleFile = {
    "_id" : 345345,
    "fieldname" : "uploads[]",
    "originalname" : "IMG_1030.JPG",
    "encoding" : "7bit",
    "mimetype" : "image/jpeg",
    "destination" : "./public/images/uploads",
    "filename" : "31a66c51883595e74ab7ae5e66fb2ab8",
    "path" : "/images/uploads/31a66c51883595e74ab7ae5e66fb2ab8",
    "size" : 251556,
    "user" : "579fbe61adac4a8a73b6f508"
};

var filePathToSend = path.join(__dirname, '../public', sampleFile.path);


function uploadToS3(filepath, destFileName, callback) {
    photoBucket
        .upload({
            ACL: 'public-read',
            Body: fs.createReadStream(filepath),
            Key: destFileName.toString(),
            ContentType: 'application/octet-stream' // force download if it's accessed as a top location
        })
        // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#httpUploadProgress-event
        .on('httpUploadProgress', function(evt) { console.log(evt); })
        // http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3/ManagedUpload.html#send-property
        .send(callback);
}

multer({limits: {fileSize:10*1024*1024}}); // note: this multer instance is created but never wired into the route

console.log('filePathToSend is ');
console.log(filePathToSend);

uploadToS3(filePathToSend, sampleFile.filename, function (err, data) {
    if (err) {
        console.error(err);
        return res.status(500).send('failed to upload to s3').end();
    }
    res.status(200)
        .send('File uploaded to S3: '
            + data.Location.replace(/</g, '&lt;')
            + '<br/><img src="' + data.Location.replace(/"/g, '&quot;') + '"/>')
        .end();
});

console.log('uploading now...');

});

This took me a while to finally get working, but if you set up the route above, update the sampleFile JSON to point to a real file on your system, and hit it with Postman, it will publish a file to your S3 account.
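
In a real route the hard-coded sampleFile would normally come from multer itself. A hedged sketch of how that could look, assuming uploadToS3 and the S3 setup are moved out to module scope and the field name 'uploads[]' from the sample above:

var upload = multer({ dest: './public/images/uploads', limits: { fileSize: 10 * 1024 * 1024 } });

router.post('/sendToS3', upload.single('uploads[]'), function (req, res) {
    // multer 1.x puts the parsed upload on req.file (path, filename, originalname, ...)
    uploadToS3(req.file.path, req.file.filename, function (err, data) {
        if (err) return res.status(500).send('failed to upload to s3');
        res.status(200).send('File uploaded to S3: ' + data.Location);
    });
});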

Hope this helps.

JasonPerr
  • Please do not hesitate to describe the answer even though the link leads to the detailed view of it. – Gar Aug 12 '16 at 17:04