My use-case is as follows: I make plenty of REST API calls from my Node server to public APIs. Sometimes the response is big and sometimes it's small. My use-case demands that I stringify the response JSON. I know a big JSON response is going to block my event loop. After some research I decided to use child_process.fork for parsing these responses, so that the other API calls need not wait. I tried sending a big 30 MB JSON file from my main process to the forked child_process. It takes so long for the child process to pick up and parse the JSON. The response I'm expecting from the child process is not huge. I just want to stringify it, get the length, and send that back to the main process.
I'm attaching the master and child code.
// master.js — fetch a (potentially large) JSON payload over HTTP and hand it
// to a forked worker so stringify/parse work does not block this event loop.
var moment = require('moment');
var fs = require('fs');
// Renamed from `process`: shadowing the global `process` object with the
// child_process module is confusing and error-prone.
var childProcess = require('child_process');
var request = require('request');

var start_time = moment.utc().valueOf();

request({url: 'http://localhost:9009/bigjson'}, function (err, resp, body) {
    if (!err && resp.statusCode == 200) {
        console.log('Body Length : ' + body.length);
        // fork(modulePath[, args]) expects args as an array of strings.
        // The original passed `0`, which is not a valid args list and left
        // process.argv[2] undefined in the child.
        var ls = childProcess.fork('response_handler.js', ['0']);
        ls.on('message', function (message) {
            console.log(moment.utc().valueOf() - start_time);
            console.log(message);
        });
        ls.on('close', function (code) {
            console.log('child process exited with code ' + code);
        });
        ls.on('error', function (err) {
            console.log('Error : ' + err);
        });
        ls.on('exit', function (code, signal) {
            console.log('Exit : code : ' + code + ' signal : ' + signal);
        });
        // BUG FIX: this send() was outside the success branch, so on any
        // request error or non-200 status `ls` was undefined and the callback
        // threw a ReferenceError. It must run only after the fork above.
        ls.send({content: body});
    }
});
response_handler.js:
// response_handler.js — worker forked by the master. Receives the response
// body over the IPC channel, stringifies it off the main event loop, and
// reports the serialized length back to the parent.
console.log("Process " + process.argv[2] + " at work ");

process.on('message', function (json) {
    console.log('Before Parsing');
    // BUG FIX: the master sends { content: body }; stringify only the payload
    // so the reported length reflects the response itself, not the IPC
    // wrapper object. (Note: the payload already crossed the IPC channel as
    // a serialized message, which is itself the main cost for a 30 MB body.)
    var x = JSON.stringify(json.content);
    console.log('After Parsing');
    process.send({msg: 'Sending message from the child. total size is' + x.length});
});
Is there a better way to achieve what I'm trying to do? On one hand I need the power of Node.js to make thousands of API calls per second, but sometimes I get a big JSON back which screws things up.