
I'm recording the user's screen via WebRTC and then posting video blobs every x seconds using MediaStreamRecorder. On the server side I have an action set up in Sails which saves each blob as a webm file.

The problem is that I can't get it to append the data and create one large webm file. When I append, the file size increases as expected, so the data is being written, but when I go to play the file it will either play only the first second, not play at all, or play without showing the video.

It would be possible to merge the files with ffmpeg, but I'd rather avoid this if at all possible.
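
For reference, if each chunk were saved as its own file (as in the commented-out branch of the server code below), they could be stitched together afterwards with ffmpeg's concat demuxer. This is only a sketch of that fallback; the chunk naming, directory layout, and output path are assumptions, and it assumes ffmpeg is installed on the server:

const fs = require('fs');
const path = require('path');
const { execFileSync } = require('child_process');

// Hypothetical helper: merge test.webm, test2.webm, ... into a single file.
// Assumes the chunk files play back correctly on their own and share codec settings.
function mergeChunks(uploadDir, outputFile) {
    // Collect the chunk files (lexicographic order is assumed to match recording order).
    var chunks = fs.readdirSync(uploadDir)
        .filter(function (name) { return name.endsWith('.webm'); })
        .sort();

    // The concat demuxer reads a text file listing one input per line.
    var listFile = path.join(uploadDir, 'chunks.txt');
    fs.writeFileSync(listFile, chunks.map(function (name) {
        return "file '" + path.join(uploadDir, name) + "'";
    }).join('\n'));

    // Copy the streams without re-encoding.
    execFileSync('ffmpeg', ['-f', 'concat', '-safe', '0', '-i', listFile, '-c', 'copy', outputFile]);
}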

Here's the code on the client:

'use strict';


// Polyfill in Firefox.
// See https://blog.mozilla.org/webrtc/getdisplaymedia-now-available-in-adapter-js/
if (typeof adapter != 'undefined' && adapter.browserDetails.browser == 'firefox') {
  adapter.browserShim.shimGetDisplayMedia(window, 'screen');
}

io.socket.post('/processvideo', function(resData) {
    console.log("Response: " + resData);
});

function handleSuccess(stream) {
  const video = document.querySelector('video');
  video.srcObject = stream;

  var mediaRecorder = new MediaStreamRecorder(stream);
  mediaRecorder.mimeType = 'video/webm';
  mediaRecorder.ondataavailable = function (blob) {
      console.log("Sending Data");
      //var rawIO = io.socket._raw;
      //rawIO.emit('some:event', "using native socket.io");

      io.socket.post('/processvideo', {"vidblob": blob}, function(resData) {
          console.log("Response: " + resData);
      });
  };
  mediaRecorder.start(3000);
}

function handleError(error) {
  errorMsg(`getDisplayMedia error: ${error.name}`, error);
}

function errorMsg(msg, error) {
  const errorElement = document.querySelector('#errorMsg');
  errorElement.innerHTML += `<p>${msg}</p>`;
  if (typeof error !== 'undefined') {
    console.error(error);
  }
}

if ('getDisplayMedia' in navigator) {
  navigator.getDisplayMedia({video: true})
    .then(handleSuccess)
    .catch(handleError);
} else {
  errorMsg('getDisplayMedia is not supported');
}

Code on the server:

module.exports = async function processVideo (req, res) {
    var fs          = require('fs'),
        path        = require('path'),
        upload_dir  = './assets/media/uploads',
        output_dir  = './assets/media/outputs',
        temp_dir = './assets/media/temp';

    var params = req.allParams();
    if(req.isSocket && req.method === 'POST') {
        _upload(params.vidblob, "test.webm");
        return res.send("Hi There");
    }
    else {
        return res.send("Unknown Error");
    }

    function _upload(file_content, file_name) {
        var fileRootName = file_name.split('.').shift(),
        fileExtension = file_name.split('.').pop(),
        filePathBase = upload_dir + '/',
        fileRootNameWithBase = filePathBase + fileRootName,
        filePath = fileRootNameWithBase + '.' + fileExtension,
        fileID = 2;



        /* Save all of the files as different files. */
        /*
        while (fs.existsSync(filePath)) {
            filePath = fileRootNameWithBase + fileID +  '.' + fileExtension;
            fileID += 1;
        }
        fs.writeFileSync(filePath, file_content);
        */

        /* Appends the binary data like you'd expect, but it's not playable. */
        fs.appendFileSync(upload_dir + '/' + 'test.file', file_content);

    }
}

Any help would be greatly appreciated!

You should be able to simply concatenate the data together. MediaRecorder will generate a valid WebM file. – Brad Sep 13 '18 at 00:55
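
To illustrate the comment above: with the browser's native MediaRecorder, start(timeslice) fires ondataavailable with chunks that continue a single recording, so appending them in order on the server should yield one playable WebM file. This is just a sketch under that assumption, reusing the same /processvideo endpoint from the question:

function recordAndPost(stream) {
    var recorder = new MediaRecorder(stream, { mimeType: 'video/webm' });

    recorder.ondataavailable = function (event) {
        if (event.data && event.data.size > 0) {
            // Each chunk continues the same recording; the server appends them in order.
            io.socket.post('/processvideo', { "vidblob": event.data }, function (resData) {
                console.log("Response: " + resData);
            });
        }
    };

    recorder.start(3000); // deliver a chunk roughly every 3 seconds
}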

1 Answer


I decided this would be difficult to develop and wouldn't really fit the project's requirements, so I built an Electron app instead. Just posting this so I can resolve the question.
