Reputation: 1297
I'm writing a small script for Node.js that reads a batch of image names (about 2.5k) from a file, resizes each image, and writes it out to a directory. My naïve approach runs out of file handles:
//get the list of images, one per line in the file
var imgs = file.split('\n');
//count down the number of images left to process
var done = imgs.length;
var deferred = Q.defer();
for (var i = 0; i < imgs.length; i++) {
    (function resizeImg(img) {
        //open the file for writing the resized image to
        var stream = fs.createWriteStream('images/' + img);
        stream
            .on('open', function () {
                //now that it's opened, resize the source image, and write it
                //out to the stream
                gm(img)
                    .resize(200, 200)
                    .write(stream, function (err) {
                        //we're finished writing - if there was an error, reject;
                        //otherwise, resolve the promise if this was the last image
                        if (err)
                            deferred.reject(err);
                        else if (--done <= 0)
                            deferred.resolve();
                    });
            });
    })(imgs[i]);
}
return deferred.promise;
What I really need is to queue all the resize operations and run them sequentially, so that all the files aren't open at the same time, but I'm not sure how to do that. Is there a standard pattern for this sort of thing?
Upvotes: 1
Views: 114
Reputation: 18078
It's easier, though not essential, to separate out the resizing process into a function that returns a promise.
This allows you to see the wood for the trees in the main routine.
function resizeImages(file) {
    //An inner utility that returns a function which does the hard work and, importantly, returns a promise.
    function resize(img) {
        return function () {
            var deferred = Q.defer(),
                stream = fs.createWriteStream('images/' + img);
            stream.on('open', function () {
                //Always resolve, even if an error is reported, allowing the .then chain to continue.
                gm(img).resize(200, 200).write(stream, deferred.resolve);
            });
            return deferred.promise;
        };
    }
    var p = Q(); //resolved starter promise
    //main routine - build a .then chain
    for (var imgs = file.split("\n"), i = 0; i < imgs.length; i++) {
        p = p.then(resize(imgs[i])).then(function (err) {
            //Yup, we're handling reported errors in the success handler!
            if (err) {
                //Handle the error here.
                //throw(something) to stop the process, or don't throw to continue.
            }
        });
    }
    return p;
}
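A minimal usage sketch (assuming `file` already holds the newline-separated list read from disk; the handlers are just placeholders for whatever your calling code needs):
resizeImages(file).then(function () {
    console.log('all images processed');
}, function (err) {
    //reached only if one of the per-image handlers above threw
    console.error('stopped early:', err);
});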
Upvotes: 0
Reputation: 6948
Can you do something like this:
//get the list of images, one per line in the file
var imgs = file.split('\n');
//Store an array of functions to be executed in sequence
var funcArr = [];
for (var i = 0; i < imgs.length; i++) {
    //push a promise-returning function onto the array
    funcArr.push((function resizeImg(img) {
        return function () {
            var deferred = Q.defer();
            //open the file for writing the resized image to
            var stream = fs.createWriteStream('images/' + img);
            stream
                .on('open', function () {
                    //now that it's opened, resize the source image, and write it
                    //out to the stream
                    gm(img)
                        .resize(200, 200)
                        .write(stream, function (err) {
                            //we're finished writing - reject this image's
                            //promise on error, resolve it otherwise
                            if (err)
                                deferred.reject(err);
                            else
                                deferred.resolve();
                        });
                });
            return deferred.promise;
        };
    })(imgs[i]));
}
//Sequences as described at http://documentup.com/kriskowal/q/
var result = Q();
funcArr.forEach(function (f) {
    result = result.then(f, function (reason) {
        //Handles a rejection from the previous step so the chain
        //continues with the next image instead of stopping
    });
});
//At this point result is a promise that resolves once all images have been processed
return result;
Each iteration of the for loop pushes a function that returns a promise into the funcArr array. Q is used after the for loop to chain promises together for sequencing. This should ensure that one image is processed before moving to the next.
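For reference, the Q documentation linked in the code expresses this same sequencing pattern more compactly with reduce. An equivalent sketch, minus the per-step error handler (so here a single rejection would stop the whole sequence):
var result = funcArr.reduce(function (soFar, f) {
    return soFar.then(f);
}, Q());
return result;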
Upvotes: 1