2017-04-26

I am using this Lambda function to generate thumbnails on the fly. But I get the following error: AWS Lambda reached the memory limit

REPORT RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Duration: 188.18 ms Billed Duration: 200 ms Memory Size: 1536 MB Max Memory Used: 1536 MB 

and ...

RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Process exited before completing request 

So I assume I am hitting the maximum memory limit. Without the function uploadRecentImage() it works. But if I add a new size to imgVariants[], I also hit the memory limit. I think the way this function processes the imgVariants (each loop) causes this, but I don't know how to make it better. I would appreciate any help.

Here is my function:

// dependencies 
var async = require('async'); 
var AWS = require('aws-sdk'); 
var gm = require('gm').subClass({ 
    imageMagick: true 
}); // use ImageMagick 
var util = require('util'); 

// configuration as code - add, modify, remove array elements as desired 
var imgVariants = [
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 6000,
        "MAX_HEIGHT": 6000,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    },
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 1280,
        "MAX_HEIGHT": 1280,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    },
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 500,
        "MAX_HEIGHT": 500,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    },
    {
        "SIZE": "Large1",
        "POSTFIX": "-l",
        "MAX_WIDTH": 100,
        "MAX_HEIGHT": 100,
        "SIZING_QUALITY": 75,
        "INTERLACE": "Line"
    }
];
var DST_BUCKET_POSTFIX = "resized"; 



// get reference to S3 client 
var s3 = new AWS.S3(); 

exports.handler = function (event, context) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {
        depth: 5
    }));
    var srcBucket = event.Records[0].s3.bucket.name;
    // Object key may have spaces or unicode non-ASCII characters.
    var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
    // Derive the file name and extension.
    var srcFile = srcKey.match(/(.+)\.([^.]+)/);

    var srcName = srcFile[1];
    var scrExt = srcFile[2];
    // Set the destination bucket.
    var dstBucket = srcBucket + DST_BUCKET_POSTFIX;

    // Make sure that source and destination are different buckets.
    if (srcBucket === dstBucket) {
        console.error("Destination bucket must be different from source bucket.");
        return;
    }

    if (!scrExt) {
        console.error('unable to derive file type extension from file key ' + srcKey);
        return;
    }

    if (scrExt != "jpg" && scrExt != "png") {
        console.log('skipping non-supported file type ' + srcKey + ' (must be jpg or png)');
        return;
    }

    function processImage(data, options, callback) {
        gm(data.Body).size(function (err, size) {

            var scalingFactor = Math.min(
                options.MAX_WIDTH / size.width,
                options.MAX_HEIGHT / size.height
            );
            var width = scalingFactor * size.width;
            var height = scalingFactor * size.height;

            this.resize(width, height)
                .quality(options.SIZING_QUALITY || 75)
                .interlace(options.INTERLACE || 'None')
                .toBuffer(scrExt, function (err, buffer) {
                    if (err) {
                        callback(err);
                    } else {
                        uploadImage(data.ContentType, buffer, options, callback);
                        uploadRecentImage(data.ContentType, buffer, options, callback);
                    }
                });
        });
    }

    function uploadImage(contentType, data, options, callback) {
        // Upload the transformed image to the destination S3 bucket.
        s3.putObject({
            Bucket: dstBucket,
            Key: options.MAX_WIDTH + '/' + srcName + '.' + scrExt,
            Body: data,
            ContentType: contentType
        }, callback);
    }


    function uploadRecentImage(contentType, data, options, callback) {
        if (options.MAX_WIDTH == 500) {
            s3.putObject({
                Bucket: dstBucket,
                Key: 'recent_optimized.' + scrExt,
                Body: data,
                ContentType: contentType
            }, callback);
        }
        if (options.MAX_WIDTH == 100) {
            s3.putObject({
                Bucket: dstBucket,
                Key: 'recent_thumb.' + scrExt,
                Body: data,
                ContentType: contentType
            }, callback);
        }
    }


    // Download the image from S3 and process for each requested image variant.
    async.waterfall(
        [
            function download(next) {
                // Download the image from S3 into a buffer.
                s3.getObject({
                    Bucket: srcBucket,
                    Key: srcKey
                }, next);
            },
            function processImages(data, next) {
                async.each(imgVariants, function (variant, next) {
                    processImage(data, variant, next);
                }, next);
            }
        ],
        function (err) {
            if (err) {
                console.error(
                    'Unable to resize ' + srcBucket + '/' + srcKey +
                    ' and upload to ' + dstBucket +
                    ' due to an error: ' + err
                );
            } else {
                console.log(
                    'Successfully resized ' + srcBucket + '/' + srcKey +
                    ' and uploaded to ' + dstBucket
                );
            }

            context.done();
        }
    );
};

While looping, do you really need to keep the already-processed images in memory? Why not have your source call the 'lambda' function in a loop instead of looping inside the lambda? – kosa


If you need 1.5 GB to process your thumbnails, then you have a serious memory-leak problem. *Don't* try to process all images at once. *Don't* keep anything in memory unless you have to; clean up after each image is processed. *Don't* use global structures, especially if you want to process images in parallel; global structures are impossible to clean up. –


Alternatively, you could look at a "resizing as a service" system such as [Cloudinary](http://cloudinary.com/) or [Imgix](http://imgix.com/). –

Answer

1. You can limit the number of parallel processImage calls. Replace

   async.each(imgVariants,

   with

   async.eachLimit(imgVariants, 2,

   so that no more than two images are processed in parallel.
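   In context, the change would look roughly like this (a sketch; the surrounding names come from the question's code):

   // Limit concurrency inside the waterfall step: at most 2 variants are
   // resized and uploaded at a time, so fewer buffers sit in memory at once.
   function processImages(data, next) {
       async.eachLimit(imgVariants, 2, function (variant, done) {
           processImage(data, variant, done);
       }, next);
   }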

2. The script has a bug:

   uploadImage(data.ContentType, buffer, options, callback);
   uploadRecentImage(data.ContentType, buffer, options, callback);

   This invokes callback twice, which is not allowed. Call the callback only once!
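   One way to fix this (a sketch using async.parallel from the async library the script already requires; a sequential chain would work just as well):

   // Inside processImage's toBuffer handler, replace the two direct calls with:
   async.parallel([
       function (done) {
           uploadImage(data.ContentType, buffer, options, done);
       },
       function (done) {
           // Caution: uploadRecentImage must call done() in every branch,
           // including the sizes it currently skips, or this never completes.
           uploadRecentImage(data.ContentType, buffer, options, done);
       }
   ], callback); // the outer callback now fires exactly once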

3. The script has another bug: event.Records[0] means only the first image is processed. If you upload several images at the same time, you will miss some of them.
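   A sketch of a handler that walks every record instead; processRecord is a hypothetical helper that would wrap the existing per-image logic:

   exports.handler = function (event, context) {
       // Process every S3 record in the event, not just event.Records[0].
       async.each(event.Records, function (record, done) {
           var srcBucket = record.s3.bucket.name;
           var srcKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, " "));
           processRecord(srcBucket, srcKey, done); // hypothetical helper
       }, function (err) {
           if (err) console.error(err);
           context.done();
       });
   };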

1. But are all imgVariants processed then, or only the first 2? If I do this I get no memory error, but no resized images either :-( – MarkusHH