
I am not able to upload large files like videos to S3; the request eventually times out. I've tried to use fs to stream the file, but I must not be using it properly.

I've tried everything I can think of to get fs to stream this file. I don't know whether it is possible to use fs the way I have it with multerS3 in a separate upload route. I can upload images and very small videos, but nothing larger.

// Here is my s3 index file which exports upload

const crypto = require('crypto');
const aws = require('aws-sdk');
const multer = require('multer');
const multerS3 = require('multer-s3');
const fs = require('fs');

aws.config.update({
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  region: 'us-east-1',
  ACL: 'public-read' // ACL is a per-request parameter, not a global config option, so this is ignored
});

const s3 = new aws.S3({ httpOptions: { timeout: 10 * 60 * 1000 } }); // 10-minute socket timeout
const options = { partSize: 5 * 1024 * 1024, queueSize: 10 };

const fileFilter = (req, file, cb) => {
  console.log('file.mimetype is ', file.mimetype);
  const allowedTypes = ['image/jpeg', 'image/png', 'video/mp4', 'video/avi', 'video/mov', 'video/quicktime'];
  if (allowedTypes.includes(file.mimetype)) {
    cb(null, true);
  } else {
    cb(new Error('Invalid file type'), false);
  }
};
const filename = getFileName(); // called with no arguments, so filename is always undefined

const upload = multer({
  fileFilter,
  storage: multerS3({
    acl: 'public-read',
    s3,
    options,                             // not a recognized multer-s3 option, so it is silently ignored
    body: fs.createReadStream(filename), // also not an option -- multer-s3 streams the request file itself;
                                         // filename is undefined here, so this line throws at load time
    bucket: 'skilljack',
    metadata: function (req, file, cb) {
      cb(null, { fieldName: 'TESTING_METADATA' });
    },
    key: function (req, file, cb) {
      const buf = crypto.randomBytes(16).toString('hex');
      let uniqFileName = file.originalname.replace(/\.jpeg|\.jpg|\.png|\.avi|\.mov|\.mp4/ig, '');
      uniqFileName += buf;
      cb(null, uniqFileName);
    }
  })
});

// Never receives arguments at the call site above; with no file it returns undefined.
function getFileName (req, file) {
  if (file) {
    return fs.createReadStream(file.originalname);
  }
}

module.exports = {
  upload
};

// Here is my route file
const express = require('express');
const router = express.Router({ mergeParams: true });
const { upload } = require('../s3');
const { asyncErrorHandler, isLoggedIn, isAuthor } = require('../middleware');


const {
    postCreate,
    postDestroy
} = require('../controllers/posts');

router.post('/', isLoggedIn, asyncErrorHandler(isAuthor), upload.single('image'), asyncErrorHandler(postCreate));
router.delete('/:post_id', isLoggedIn, asyncErrorHandler(isAuthor), asyncErrorHandler(postDestroy));
module.exports = router;
cthomas
  • Was this resolved? – Mike K Oct 28 '19 at 10:46
  • I was not able to get it to work with multerS3. I got it working by uploading the file locally and then sending it to S3 in chunks: I used Agenda to run the transfer as a separate job, with s3.upload to push to S3 and fs to read the incoming file. The other way I got it to work was having S3 load the file directly from the browser, but I couldn't get that to upload in chunks, so I went with the first method. With the second approach, everything was done on S3's end. – cthomas Oct 29 '19 at 13:26
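
A minimal sketch of the first approach cthomas describes, assuming the route first saves the upload to a local path (e.g. with multer's disk storage); the bucket name, key, and path are illustrative, and the Agenda job wiring is omitted:

const aws = require('aws-sdk');
const fs = require('fs');

const s3 = new aws.S3();

// Stream a file already saved on disk to S3. s3.upload handles the
// multipart mechanics, and the read stream keeps the whole file out
// of memory.
function uploadLocalFileToS3(localPath, key) {
  return s3.upload(
    { Bucket: 'skilljack', Key: key, Body: fs.createReadStream(localPath), ACL: 'public-read' },
    { partSize: 5 * 1024 * 1024, queueSize: 1 } // one 5 MB part in flight at a time
  ).promise();
}

// e.g. inside an Agenda job handler (wiring assumed, not shown here):
// await uploadLocalFileToS3('/tmp/uploads/video.mp4', 'video-' + Date.now());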

2 Answers


I was having the same issue for days. One way to avoid it (though it makes the upload take longer) is to reduce the queueSize to 1.

The timeout happens when your network is not great: with several parts uploading concurrently, some part eventually sits idle long enough to trigger a timeout.

Setting queueSize to 1 allocates all of the available bandwidth to the single part being uploaded, avoiding the timeout.
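
As a sketch of where that setting lives, assuming a direct upload rather than multer-s3 (which does not document a way to pass these options through), using the AWS SDK v2 ManagedUpload API; the bucket, key, and file names are illustrative:

const aws = require('aws-sdk');
const fs = require('fs');

// queueSize is how many parts upload concurrently; partSize is the chunk
// size. queueSize: 1 devotes all uplink bandwidth to a single part, so no
// part sits idle long enough to hit the socket timeout.
const managed = new aws.S3.ManagedUpload({
  service: new aws.S3(),
  partSize: 5 * 1024 * 1024,
  queueSize: 1,
  params: { Bucket: 'my-bucket', Key: 'big-video.mp4', Body: fs.createReadStream('./big-video.mp4') }
});

managed.on('httpUploadProgress', p => console.log('uploaded', p.loaded, 'bytes'));
managed.send((err, data) => {
  if (err) return console.error('upload failed:', err);
  console.log('uploaded to', data.Location);
});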


In addition to setting the queueSize to 1, you may want to disable the timeout.

const s3 = new aws.S3({
    accessKeyId: config.get('accessKeyId'),
    secretAccessKey: config.get('secretAccessKey'),
    params: { Bucket: config.get('bucket') }, // Bucket must be nested under params to bind it to requests
});

s3.config.httpOptions.timeout = 0
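
For what it's worth, timeout: 0 disables the socket timeout entirely; the same option can also be set at construction time as new aws.S3({ httpOptions: { timeout: 0 } }), which is the form the question's code uses with a ten-minute value.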

Reference

Mike K