In AWS Lambda, resizing multiple thumbnails in parallel throws "Error: Stream yields empty buffer"

I adapted the example to create several thumbnail sizes and to run the resizes in parallel.

My code runs fine locally in a few seconds, but in the Lambda cloud the parallel version errors out after resizing the first thumbnail. If I switch it to run serially instead of in parallel, it works, but takes about 60 seconds.

Why does running the conversions in parallel on Lambda cause the stream to yield an empty buffer? And how can I improve performance so the thumbnails are created in a few seconds, while still getting good value from Lambda in terms of compute cost?

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');

// constants
var SIZES = [100, 320, 640];

// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    var dstBucket = srcBucket + "-resized";

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return context.done();
    }
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return context.done();
    }

    // Sanity check: validate that source and destination are different buckets.
    if (srcBucket == dstBucket) {
        console.error("Destination bucket must not match source bucket.");
        return context.done();
    }

    // Download the image from S3
    s3.getObject({
        Bucket: srcBucket,
        Key: srcKey
    }, function(err, response) {
        if (err) return console.error('unable to download image ' + err);

        var contentType = response.ContentType;
        var original = gm(response.Body);

        original.size(function(err, size) {
            if (err) return console.error(err);

            // transform, and upload to a different S3 bucket.
            async.each(SIZES, function(max_size, callback) {
                resize_photo(size, max_size, imageType, original,
                             srcKey, dstBucket, contentType, callback);
            }, function(err) {
                if (err) {
                    console.error('Unable to resize ' + srcBucket +
                                  ' due to an error: ' + err);
                } else {
                    console.log('Successfully resized ' + srcBucket);
                }
                context.done();
            });
        });
    });
};

// wrap up variables into an options object
var resize_photo = function(size, max_size, imageType, original,
                            srcKey, dstBucket, contentType, done) {
    var dstKey = max_size + "_" + srcKey;

    // transform, and upload to a different S3 bucket.
    async.waterfall([
        function transform(next) {
            // Infer the scaling factor to avoid stretching the image unnaturally.
            var scalingFactor = Math.min(
                max_size / size.width,
                max_size / size.height
            );
            var width = scalingFactor * size.width;
            var height = scalingFactor * size.height;

            // Transform the image buffer in memory.
            original.resize(width, height)
                .toBuffer(imageType, function(err, buffer) {
                    if (err) {
                        next(err);
                    } else {
                        next(null, buffer);
                    }
                });
        },
        function upload(data, next) {
            // Stream the transformed image to a different S3 bucket.
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, next);
        }
    ], function(err) {
        console.log('finished resizing ' + dstBucket + '/' + dstKey);
        if (err) {
            console.error(err);
        } else {
            console.log('Successfully resized ' + dstKey);
        }
        done(err);
    });
};
amazon-web-services imagemagick aws-lambda
1 answer

I ran into the same issue today.

There may be other things you can do as well, but when I increased the memory allocated to the Lambda function, the empty-buffer problem disappeared.
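For reference, you can change the memory setting in the Lambda console, or programmatically with the same aws-sdk the handler already uses. A minimal sketch (the function name below is a placeholder, not something from the question):

// One-off script: raise a Lambda function's memory allocation.
// Assumes aws-sdk can find credentials and a region, and that
// 'my-resize-function' is replaced with your real function name.
var AWS = require('aws-sdk');
var lambda = new AWS.Lambda();

lambda.updateFunctionConfiguration({
    FunctionName: 'my-resize-function', // placeholder name
    MemorySize: 1024                    // in MB
}, function(err, data) {
    if (err) return console.error('update failed: ' + err);
    console.log('new memory size: ' + data.MemorySize + ' MB');
});

Note that Lambda allocates CPU power in proportion to memory, so raising MemorySize also buys more CPU, which is why it can help with both the empty-buffer error and the slow serial runtimes.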

I am resizing images of around 2.1 MB and 5000x3000 pixels into 3 smaller sizes.

Duration: 11619.86 ms    Billed Duration: 11700 ms    Memory Size: 1024 MB    Max Memory Used: 582 MB
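If you want to squeeze more out of a given memory setting, another thing you could try (just an untested sketch based on the code in the question) is capping the parallelism with async.eachLimit, so that only a couple of resize/upload pipelines hold buffers in memory at the same time:

// Drop-in replacement for the async.each call in the handler.
// The limit of 2 is an arbitrary assumption; tune it against
// your function's memory size.
async.eachLimit(SIZES, 2, function(max_size, callback) {
    resize_photo(size, max_size, imageType, original,
                 srcKey, dstBucket, contentType, callback);
}, function(err) {
    if (err) {
        console.error('Unable to resize ' + srcKey + ' due to an error: ' + err);
    } else {
        console.log('Successfully resized ' + srcKey);
    }
    context.done();
});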

Hope that helps
