0

I'm having problems with this code in Lambda when I try to send large images. I send a batch of images, and the code resizes each one into several different sizes, as the SIZES array shows.

My Lambda is configured at the maximum memory (1536 MB) and the maximum timeout (5 min).

The problem occurs when large images are uploaded. I searched all over the internet but could not find a solution.

Error

Unhandled rejection Error: Stream yields empty buffer at Socket.<anonymous> (/var/task/node_modules/gm/lib/command.js:57:17) at emitNone (events.js:72:20) at Socket.emit (events.js:166:7) at endReadableNT (_stream_readable.js:905:12) at nextTickCallbackWith2Args (node.js:437:9) at process._tickDomainCallback (node.js:392:17) 

**Here's the code.**

var gm = require('gm').subClass({ imageMagick: true }), aws = require('aws-sdk'), async = require('async'), fs = require('fs'), promise = require('bluebird'), request = require('request').defaults({ encoding: null }); var BUCKET = "myBucket", FOLDER = 'photos/', s3 = new aws.S3({ accessKeyId: 'myKeyId', secretAccessKey: 'mySecretKey' }), SIZES = [{ type: 'original',size: 1080,mark: false,hide: true }, { type: 'thumb',size: 120, mark: false, hide: false }, { type: 'thumb',size: 240, mark: false, hide: false }, { type: 'thumb',size: 360, mark: false, hide: false }, { type: 'card',size: 350 ,mark: false, hide: false }, { type: 'full',size: 360, mark: true, hide: false }, { type: 'full',size: 540, mark: true, hide: false }, { type: 'full',size: 720, mark: true, hide: false }, { type: 'full',size: 900, mark: true, ide: false }, { type: 'full',size: 1080, mark: true, hide: false }]; var service = { s3: { download: function(bucket, key) { return new promise(function(resolve, reject) { s3.getObject({ Bucket: bucket, Key: key }, function(err, data) { if (err) { reject(err); } else { resolve(data); } }); }); }, upload: { async: function(photo, bucket, key, content_type, private) { return new promise(function(resolve, reject) { s3.putObject({ Bucket: bucket, Key: key, Body: photo, ContentType: content_type, ACL: private ? 'private' : 'public-read' }, function(err, data) { if (err) { reject(err) } else { resolve(key); } }); }); } } }, watermark: { resize: function(photo, name) { var temp = '/tmp/w_' + name + '.png'; return new promise(function(resolve, reject) { gm(photo).size(function(err, size) { var that = this; if (err) { reject(err); } var smaller = size.width > size.height ? 
size.height : size.width; var _size = smaller * 0.8; request.get('https://s3.amazonaws.com/FOLDER/photos/watermark.png', function(e, r, body) { gm(body) .resize(_size) .write(temp, function(error) { if (error) reject(error); that.composite(temp) .gravity('Center') .toBuffer(function(e, buffer) { if (e) reject(e); fs.access(temp, fs.F_OK, function(err) { if (!err) { fs.unlinkSync(temp); } }); resolve(buffer); }); }); }); }); }); } }, resize: function(photo, width, height) { return new promise(function(resolve, reject) { gm(photo).resize(width, height).toBuffer(function(err, buffer) { if (err) { reject(err); } resolve(buffer); }); }); }, crop: function(photo, width, height) { return new promise(function(resolve, reject) { gm(photo).size(function(err, size) { if (err) { reject(err); } var x = 0, y = 0; if (size.width > size.height) { this.resize(null, height); x = (((height * size.width) / size.height) - width) / 2; } else { this.resize(width); y = (((width * size.height) / size.width) - height) / 2; } this.crop(width, height, x, y).toBuffer(function(err, buffer) { resolve(buffer); }); }); }); }, process: function(type, photo, width, height) { if (type == 'thumb') { return service.crop(photo, width, height); } else { return service.resize(photo, width, height); } } }; exports.handler = function(event, context, callback) { var $filename = event.filename, $session = event.session, $mimetype = null, $original = null, $watermarked = null; async.waterfall([ function(_next) { service.s3.download(BUCKET, FOLDER + $session + '/original/' + $filename).then(function(_original) { $mimetype = _original.ContentType; $original = _original.Body; _next(null); }, function(_error) { _next(_error); }); }, function(_next) { service.watermark.resize($original, (Math.random().toString(36).slice(-6) + new Date().getTime())).then(function(_watermarked) { $watermarked = _watermarked; _next(null); }); }, function(_next) { async.each(SIZES, function(_image, _callback) { 
service.process(_image.type, _image.mark ? $watermarked : $original, _image.size, _image.size).then(function(_generated) { service.s3.upload.async(_generated, BUCKET, FOLDER + $session + '/' + _image.type + '/' + (_image.type != 'card' ? _image.size + '_' : '') + $filename, $mimetype, _image.hide).then(function() { _callback(null); }); }); }, function() { _next(null); }); } ], function() { callback(null, 'Imagens processadas. Session: ' + $session); }); }; 

2 Answers 2

1

The old process took too long to finish, so Lambda ran out of memory.

I resolved this problem by breaking the SIZES array in two, then duplicating the async.each function to process the second array.

This new method runs perfectly and creates all the images.

Sign up to request clarification or add additional context in comments.

Comments

0

I've had this error before and eventually traced it to hitting the memory limits. Fortunately, I was able to increase the size of my memory and that solved the problem.

Unfortunately, I see you're already on the maximum allowable memory and still hitting the limit. Just how big are these images that are being uploaded? If they really are big, and you need the output as one image, the only way I can see how you could do it would be to upload the huge image file, crop it to form x4 or more smaller images, resize each of those and then stick each one back together again.

1 Comment

The images are around 12 MB, but I also had problems with smaller images like 5 MB and 7 MB.

Start asking to get answers

Find the answer to your question by asking.

Ask question

Explore related questions

See similar questions with these tags.