node.js - Resizing images with Node.js and AWS

Tags: node.js amazon-web-services image-resizing

I am trying to use Node.js to fetch an image from an AWS S3 bucket, resize it to 4 different sizes, and then save it back to the same bucket, but into a folder that in turn contains 4 subfolders, one for each new size.

When I run the function, I get the following error:

Unable to resize devimageresize/diavelBlack.jpg and upload to / due to an error: Error: Stream yields empty buffer

I am fairly new to Node.js and am not sure whether I have written the code correctly. What is causing this error?

Here is my code:

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm');
var util = require('util');


// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey    = event.Records[0].s3.object.key;
    var dstBucket = event.Records[0].s3.dst;

    var _800px = {
        width: 800,
        dstKey: 800 + srcKey,
        dstBucket: dstBucket.large
    };

    var _500px = {
        width: 500,
        dstKey: 500 + srcKey,
        dstBucket: dstBucket.medium
    };

    var _200px = {
        width: 200,
        dstKey: 200 + srcKey,
        dstBucket: dstBucket.small
    };

    var _45px = {
        width: 45,
        dstKey: 45 + srcKey,
        dstBucket: dstBucket.thumbnail
    };

    var _sizesArray = [_800px, _500px, _200px, _45px];

    var len = _sizesArray.length;

    // Sanity check: validate that source and destination are same buckets.
    if (srcBucket == dstBucket) {
        console.error("Destination bucket must match source bucket.");
    }

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return;
    }
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return;
    }

    // Download the image from S3, transform, and upload to same S3 bucket but different folders.
    async.waterfall([
            function download(next) {
                // Download the image from S3 into a buffer.
                s3.getObject({
                        Bucket: srcBucket,
                        Key: srcKey
                    },
                    next);
            },

            function transform(response, next) {


                for (var i = 0; i<len; i++) {

                    // Transform the image buffer in memory.
                    gm(response.Body).resize(_sizesArray[i].width)
                        .toBuffer(imageType, function(err, buffer) {
                            if (err) {
                                next(err);
                            } else {
                                next(null, response.ContentType, buffer);
                            }
                        });
                }
            },

            function upload(contentType, data, next) {

                for (var i = 0; i<len; i++) {

                    // Stream the transformed image to a different S3 bucket.
                    s3.putObject({
                            Bucket: _sizesArray[i].dstBucket,
                            Key: _sizesArray[i].dstKey,
                            Body: data,
                            ContentType: contentType
                        },
                        next);
                }
            }

        ], function (err) {
            if (err) {
                console.error(
                    'Unable to resize ' + srcBucket + '/' + srcKey +
                    ' and upload to ' + dstBucket + '/' +
                    ' due to an error: ' + err
                );
            } else {
                console.log(
                    'Successfully resized ' + srcBucket + '/' + srcKey +
                    ' and uploaded to ' + dstBucket
                );
            }

            context.done();
        }
    );
};

Best answer

Sorted it. The main issue was that an extra argument needs to be passed to gm: gm(response.Body, srcKey).
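
As a standalone illustration of that change (a minimal local sketch, not taken from the original answer; it assumes the gm package is installed and a test image named diavelBlack.jpg in the working directory), the second argument to gm() acts as a filename hint so ImageMagick can infer the input format from the extension:

var fs = require('fs');
var gm = require('gm').subClass({ imageMagick: true }); // use ImageMagick, as in the full code below

var srcKey = 'diavelBlack.jpg';        // key whose extension serves as the format hint
var body = fs.readFileSync(srcKey);    // stand-in for response.Body returned by s3.getObject

gm(body, srcKey)                       // <-- the extra argument that fixes "Stream yields empty buffer"
    .resize(800)
    .toBuffer('jpg', function (err, buffer) {
        if (err) { return console.error(err); }
        console.log('resized to ' + buffer.length + ' bytes');
    });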

Full code:

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var util = require('util');


// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function(event, context) {
    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;

    var _800px = {
        width: 800,
        dstnKey: srcKey,
        destinationPath: "large"
    };

    var _500px = {
        width: 500,
        dstnKey: srcKey,
        destinationPath: "medium"
    };

    var _200px = {
        width: 200,
        dstnKey: srcKey,
        destinationPath: "small"
    };

    var _45px = {
        width: 45,
        dstnKey: srcKey,
        destinationPath: "thumbnail"
    };

    var _sizesArray = [_800px, _500px, _200px, _45px];

    var len = _sizesArray.length;

    console.log(len);
    console.log(srcBucket);
    console.log(srcKey);

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return;
    }
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return;
    }

    // Download the image from S3, transform, and upload to same S3 bucket but different folders.
    async.waterfall([
            function download(next) {
                // Download the image from S3 into a buffer.

                s3.getObject({
                        Bucket: srcBucket,
                        Key: srcKey
                    },
                    next);
            },

            function transform(response, next) {


                for (var i = 0; i<len; i++) {

                    // Transform the image buffer in memory.
                    gm(response.Body, srcKey)
                        .resize(_sizesArray[i].width)
                        .toBuffer(imageType, function(err, buffer) {
                            if (err) {
                                next(err);

                            } else {
                                next(null, response.ContentType, buffer);
                            }
                        });
                }
            },

            function upload(contentType, data, next) {

                for (var i = 0; i<len; i++) {

                    // Stream the transformed image to a different folder.
                    s3.putObject({
                            Bucket: srcBucket,
                            Key: "dst/" + _sizesArray[i].destinationPath + "/" + _sizesArray[i].dstnKey,
                            Body: data,
                            ContentType: contentType
                        },
                        next);
                }
            }

        ], function (err) {
            if (err) {
                console.error(
                    '---->Unable to resize ' + srcBucket + '/' + srcKey +
                    ' and upload to ' + srcBucket + '/dst' +
                    ' due to an error: ' + err
                );
            } else {
                console.log(
                    '---->Successfully resized ' + srcBucket +
                    ' and uploaded to' + srcBucket + "/dst"
                );
            }

            context.done();
        }
    );
};
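
One caveat worth noting about the code above (an observation of mine, not part of the accepted answer): both the transform and upload steps call the waterfall's next callback once per loop iteration, so async.waterfall is effectively driven by whichever size finishes first, and recent versions of async will typically complain that the callback was already called. Below is a sketch of one way to handle all four sizes, using async.each so completion is signalled only once, after every size has been resized and uploaded. The helper name and its parameters are hypothetical, not from the original answer.

// A sketch, not the original answer's code.
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });

var s3 = new AWS.S3();

// Hypothetical helper; "sizes" would be the _sizesArray defined above.
function resizeAndUploadAll(response, srcBucket, srcKey, imageType, sizes, callback) {
    async.each(sizes, function (size, done) {
        // Resize this size in memory from the downloaded buffer.
        gm(response.Body, srcKey)
            .resize(size.width)
            .toBuffer(imageType, function (err, buffer) {
                if (err) { return done(err); }

                // Upload the resized image into its own folder under dst/.
                s3.putObject({
                    Bucket: srcBucket,
                    Key: "dst/" + size.destinationPath + "/" + size.dstnKey,
                    Body: buffer,
                    ContentType: response.ContentType
                }, done);
            });
    }, callback); // invoked once, after all sizes succeed or on the first error
}

The handler's waterfall could then replace the transform and upload steps with a single step that calls resizeAndUploadAll(response, srcBucket, srcKey, imageType, _sizesArray, next).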

A similar question about node.js - Resizing images with Node.js and AWS can be found on Stack Overflow: https://stackoverflow.com/questions/28741131/
