I am streaming webcam data from my Android device to a Node.js TCP server.
I can see that data is arriving by listening for the socket's 'data' events. However, the file I write from the stream is not viewable; it is probably garbage data or is missing some headers, because VLC cannot play it.
My next step is to make the stream playable in real time in a browser.
What am I doing wrong?
const net = require('net');
const fs = require('fs');

// Start a TCP server and write everything the client sends into temp.mp4
net.createServer(function (socket) {
    console.log("client connected");

    const file = fs.createWriteStream("temp.mp4");
    socket.pipe(file, { end: false }); // keep the file stream open when the socket ends

    socket.on('end', function () {
        console.log("ended");
    });
}).listen(5000);
I tested to see if it really captured video output:
$ mediainfo temp.mp4
General
Complete name : temp.mp4
Format : H.263
Format version : H.263
File size : 126 KiB
Video
Format : H.263
Width : 0 pixels
Height : 0 pixels
Color space : YUV
Chroma subsampling : 4:2:0
Bit depth : 8 bits
Compression mode : Lossy
And this is the Android code that sets up the MediaRecorder (assume the socket is already connected without problems; a rough sketch of that connection is below).
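For context, the connection is just a plain java.net.Socket to the Node server, roughly like this sketch (the address is a placeholder, exception handling omitted):

Socket socket = new Socket("192.168.1.2", 5000); // placeholder LAN address; 5000 matches the Node server's port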
mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.DEFAULT);
mediaRecorder.setVideoSize(320, 240);
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.DEFAULT);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.DEFAULT);
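// Hand the recorder a file descriptor backed by the connected socket, so the encoded output goes over the network instead of to a local file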
ParcelFileDescriptor pfd = ParcelFileDescriptor.fromSocket(socket);
mediaRecorder.setOutputFile(pfd.getFileDescriptor());
mediaRecorder.setMaxDuration(5000);
mediaRecorder.setMaxFileSize(5000000);
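After this configuration the recorder is prepared and started following the usual MediaRecorder lifecycle, roughly like this sketch (previewHolder is an illustrative name for the SurfaceHolder backing the camera preview):

try {
    mediaRecorder.setPreviewDisplay(previewHolder.getSurface()); // previewHolder: illustrative SurfaceHolder for the preview surface
    mediaRecorder.prepare();
    mediaRecorder.start();
} catch (IOException e) {
    Log.e("recorder", "prepare/start failed", e);
}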