I am recently writing a small project that will make multiple network requests and piped the response stream to express response.
I found that express seems to send response to client when the first stream finishes.
var logger = require('../lib/logger');
var request = require('request');
var JsonStream = require('JSONStream');
var gFunc = {};
function reqAndParse(shards){
gFunc.res.set({'Transfer-Encoding': 'chunked'});
var idNum = 0;
//var completed_reqs = 0;
var completed_res = 0;
for(shard in shards){
//console.log('shard:', shards[shard]);
var reqUrl = 'http://localhost:xxxx/';
reqUrl += shard;
var reqStream = request({url: reqUrl},
function(err, res, body){
if(err){
logger.log('[ERROR]errmsg:%s, errstack:%s', err.message, err.stack);
gFunc.res.status(500).json({'err':'server internal error'});
}
});
// gFunc.res.on('finish', function(){
// if(++completed_res === shards.length){
// gFunc.res.status(400).end();
// }
// });
reqStream.pipe(JsonStream.parse('response.*.id', function(element){
console.log('element:', element);
idNum++;
if(gFunc.req.query.rows && idNum > req.query.rows){
return null;
}
var csv_id = element;
csv_id += '\n';
return csv_id;
}))
.pipe(gFunc.res);
}
}
exports.handleRequest = function handleRequest(req,res){
gFunc.req = req;
gFunc.res = res;
//...
shards = ['shard1', 'shard2', 'shard3']
reqAndParse(shards);
}
When I 'curl' this service, the client will get a 200 response with the data from the first stream, sent by express.
Where does express send this response? How can I avoid this behaviour and send the data from all network streams back to the client?