Reputation: 1557
My approach is very similar to: Resizing image with nodeJs and AWS, yet I still get this error when I run the service locally on Windows 10; it works flawlessly on Lambda in the cloud. I found a discussion about how limited memory or a short timeout can make gm emit this error on stderr: Stream yields empty buffer error when processing large image files using gm. Another discussion mentions how limited memory makes a Lambda service fragile: In amazon lambda, resizing multiple thumbnail sizes in parallel async throws Error: Stream yields empty buffer. However, I'm running this Node service locally with plenty of idle memory, so I shouldn't be hitting the "limited memory" trap every time. I suspect the problem comes from gm itself. Here is my code:
async.forEachOf(_sizesArray, function(value, key, callback) {
    async.waterfall([
        // Step 1: download the source image from S3
        function download(next) {
            s3.getObject({
                Bucket: srcBucket,
                Key: srcKey
            }, next);
        },
        // Step 2: convert the downloaded object to a JPG buffer
        function convert(response, next) {
            console.log(response.Body);
            gm(response.Body, srcKey)
                .antialias(true)
                .density(300)
                .toBuffer('JPG', function(err, buffer) {
                    if (err) {
                        console.log(err + "\n\nfrom convert\n");
                        next(err);
                    } else {
                        next(null, buffer);
                    }
                });
        },
        // Step 3: scale the image to the target size for this key
        function process(response, next) {
            gm(response).size(function(err, size) {
                var scalingFactor = Math.min(
                    _sizesArray[key].width / size.width,
                    _sizesArray[key].width / size.height
                );
                var width = scalingFactor * size.width;
                var height = scalingFactor * size.height;
                var index = key;

                this.resize(width, height).toBuffer('JPG', function(err, buffer) {
                    if (err) {
                        console.log(err + "\n\nfrom process\n");
                        next(err);
                    } else {
                        next(null, buffer, key);
                    }
                });
            });
        },
        // Step 4: upload the resized image to the destination bucket
        function upload(data, index, next) {
            s3.putObject({
                Bucket: dstBucket,
                Key: myPath + "/" + fileName.slice(0, -4) +
                    _sizesArray[index].suffix + ".jpg",
                Body: data,
                ContentType: 'JPG'
            }, next);
        }
    ], function(err, result) {
        if (err) {
            console.error(err);
        }
        console.log("End of step " + key);
        callback();
    });
}, function(err) {
    if (err) {
        console.error('Unable to resize ' + srcBucket + '/' + srcKey +
            ' and upload to ' + dstBucket + myPath + '/' +
            ' due to an error: ' + err);
    } else {
        console.log('Successfully resized ' + srcBucket +
            ' and uploaded to ' + dstBucket + '/' + myPath + '/');
    }
    cb(myPath + "/" + fileName);
});
To be more specific, my async.waterfall() chain exits while executing function convert(response, next): the toBuffer() call there throws the error mentioned in the question title.
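Here is a minimal local sketch, stripped of the S3 and async plumbing, that isolates the same gm(...).density(300).toBuffer('JPG', ...) call (the local file name ./test.jpg is just a placeholder); running something like this outside Lambda triggers the same error for me:

var gm = require('gm').subClass({
    imageMagick: true
});

// Assumes a test image ./test.jpg exists next to this script.
gm('./test.jpg')
    .antialias(true)
    .density(300)
    .toBuffer('JPG', function(err, buffer) {
        if (err) {
            // This is where "Error: Stream yields empty buffer" shows up locally.
            console.log(err + "\n\nfrom convert\n");
        } else {
            console.log('converted to ' + buffer.length + ' bytes');
        }
    });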
Upvotes: 2
Views: 4641
Reputation: 1557
So my original code works on Lambda. The reason I couldn't get it to work with my local Node setup is that gm wasn't installed correctly: the convert binary was missing. With subClass({ imageMagick: true }) the library shells out to ImageMagick's convert command, which I didn't have locally, whereas the default gm import uses the GraphicsMagick gm binary. To resolve this, I reinstalled the GraphicsMagick binaries from GraphicsMagick Download and updated to the latest version. In the install wizard, check the option along the lines of "associate related files". Then import it directly as:
var gm = require('gm');
instead of:
var gm = require('gm').subClass({
    imageMagick: true
});
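As a quick sanity check (a small sketch, not part of the original fix; "gm version" is GraphicsMagick's own CLI command), you can confirm that the binary gm shells out to is actually on the PATH before running the resize pipeline:

// Verify the GraphicsMagick "gm" binary is reachable, so gm(...)
// doesn't fail later with "Stream yields empty buffer".
var childProcess = require('child_process');

childProcess.exec('gm version', function(err, stdout) {
    if (err) {
        console.error('GraphicsMagick not found on PATH: ' + err.message);
    } else {
        console.log(stdout.split('\n')[0]); // e.g. "GraphicsMagick 1.3.xx ..."
    }
});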
Upvotes: 5