I'm trying to get up to speed on Node.js and one thing I have been trying to accomplish is to mimic a simple file transfer agent with Node acting as the listening server. This would require node listening to tcp socket requests and then accepting the binary data stream and storing it in the system.
What I have so far is this, where the server still doesn't store anything just outputs the data it receives, or it should.
Server:
var net = require('net');
var fs = require('fs');
var buffer = require('buffer');
var server = net.createServer(function() {
console.log('server connected');
});
var HOST = '127.0.0.1';
var PORT = '9001'
var FILEPATH = '/home/steve/Downloads/';
server.listen(PORT, HOST, function() {
//listening
console.log('server bound to ' + PORT + '\n');
server.on('connection', function(){
console.log('connection made...\n')
})
server.on('data', function(data) {
console.log('data received');
console.log('data is: \n' + data);
});
});
And just to keep things simple, I wrote a hack of a client in Node as well.
Client:
var net = require('net');
var fs = require('fs');
var PORT = 9001;
var HOST = '127.0.0.1';
var FILEPATH = '/home/steve/Work/Node.js/FileTransfer/random.fil';
var client = new net.Socket()
//connect to the server
client.connect(PORT,HOST,function() {
'Client Connected to server'
//send a file to the server
var fileStream = fs.createReadStream(FILEPATH);
fileStream.on('error', function(err){
console.log(err);
})
fileStream.on('open',function() {
fileStream.pipe(client);
});
});
//handle closed
client.on('close', function() {
console.log('server closed connection')
});
client.on('error', function(err) {
console.log(err);
});
When I run them, I don't get any errors, but the server simply creates the server, accepts the connection then closes it. I can't seem to figure it out beyond that. Also, would this be a decent solution to transferring large files, in ~1GB range or is Node simply not geared for something like this?
TIA