This is a function attached to an Express (v3.8.0) route. I am using the latest stable Node.js (v0.10.32 at the moment). Let's say "fileASDF" is the path to a JSON file listing all the file paths for which I want to get the MD5 sums. This seems to be working, but I've noticed that the node process's memory grows by roughly 100 MB over the first few requests, especially when the number of files is in the hundreds. If I start the node server and begin hitting this route (say, 100 requests), memory quickly climbs to 500 MB, and even after no further requests are made to this route, it never drops back to the ~100 MB (total) the server usually sits at when idle.
I tried loading the server through the node REPL with the --expose_gc flag and calling global.gc() to see if the memory would get released, but nothing changed.
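For reference, this is roughly how I'm watching the memory (a minimal sketch; the 5-second interval and the MB formatting are just for illustration, while process.memoryUsage() and the gc() exposed by --expose_gc are standard Node APIs):

// start node with --expose-gc (or --expose_gc) so global.gc is available
setInterval(function () {
    if (global.gc) {
        global.gc(); // force a full collection
    }
    var m = process.memoryUsage();
    console.log('rss: ' + (m.rss / 1048576).toFixed(1) + ' MB, ' +
                'heapUsed: ' + (m.heapUsed / 1048576).toFixed(1) + ' MB');
}, 5000);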
OK, maybe this is super obvious, but I can't see what I'm doing wrong. Any ideas on why the memory is not being released?
var fs = require('graceful-fs');
var crypto = require('crypto');

exports.hashCalculator = function () {
    // fileASDF is the path to the JSON file listing the files to hash
    var fileWithPathsArray = fs.readFileSync(fileASDF);
    var filePathsJson = JSON.parse(fileWithPathsArray);
    var pathsArray = filePathsJson.files;

    return function (req, res) {
        var z = 0;
        var len = pathsArray.length;
        var results = [];

        function calculateHashes(filePath) {
            var md5sum = crypto.createHash('md5');
            var rs = fs.createReadStream(filePath);

            function updateSum(d) {
                md5sum.update(d);
            }

            function getDigest() {
                var d = md5sum.digest('hex');
                // record which file this digest belongs to
                results.push({ path: filePath, hash: d });
                z += 1;
                if (z === len) {
                    res.send({ hashes: results });
                }
            }

            rs.on('data', updateSum);
            rs.on('end', getDigest);
        }

        pathsArray.forEach(calculateHashes);
    };
};
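For completeness, the route is mounted roughly like this (a sketch; the module name './hashes' and the '/hashes' path are placeholders, not my actual names):

var express = require('express');
var hashes = require('./hashes'); // the module containing hashCalculator (name assumed)

var app = express();
// hashCalculator() runs once at startup; the returned closure handles every request,
// so pathsArray is read and parsed once, while results/z/len are fresh per request
app.get('/hashes', hashes.hashCalculator());
app.listen(3000);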