Create a zip file on S3 from files on S3 using Lambda Node


Problem Description

I need to create a Zip file that consists of a selection of files (videos and images) located in my S3 bucket.

The problem at the moment, using my code below, is that I quickly hit the memory limit on Lambda.

// Assumed context (not shown in the original snippet): `files` is an array of
// { key } objects and `bucket` is the source bucket name.
const async = require('async');
const AWS = require('aws-sdk');
const JSZip = require('jszip');

const s3 = new AWS.S3();
const zip = new JSZip();

// Download up to 10 objects at a time and add each body to the in-memory zip
async.eachLimit(files, 10, function(file, next) {
    var params = {
        Bucket: bucket, // bucket name
        Key: file.key
    };
    s3.getObject(params, function(err, data) {
        if (err) {
            console.log('file', file.key);
            console.log('get image files err', err, err.stack); // an error occurred
            next(err); // stop iterating on error (the original never called next() here, stalling eachLimit)
        } else {
            console.log('file', file.key);
            zip.file(file.key, data.Body);
            next();
        }
    });
},
function(err) {
    if (err) {
        console.log('err', err);
    } else {
        console.log('zip', zip);
        // Generate the whole archive in memory, then upload it
        var content = zip.generateNodeStream({
            type: 'nodebuffer',
            streamFiles: true
        });
        var params = {
            Bucket: bucket, // name of dest bucket
            Key: 'zipped/images.zip',
            Body: content
        };
        s3.upload(params, function(err, data) {
            if (err) {
                console.log('upload zip to s3 err', err, err.stack); // an error occurred
            } else {
                console.log(data); // successful response
            }
        });
    }
});

  • Is this possible using Lambda, or should I look at a different approach?

  • Is it possible to write to a compressed zip file on the fly, therefore eliminating the memory issue somewhat, or do I need to have the files collected before compression?

Any help would be greatly appreciated.

Recommended Answer

Okay, I got to do this today and it works. Direct Buffer to Stream, no disk involved, so memory or disk limitations won't be an issue here:

'use strict';

const AWS = require("aws-sdk");
AWS.config.update({ region: "eu-west-1" });
const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

const _archiver = require('archiver');

// This returns us a stream: consider it a real pipe sending fluid to the S3 bucket. Don't forget it
const streamTo = (_bucket, _key) => {
    var stream = require('stream');
    var _pass = new stream.PassThrough();
    s3.upload({ Bucket: _bucket, Key: _key, Body: _pass }, (_err, _data) => { /* ...Handle Errors Here */ });
    return _pass;
};

exports.handler = async (_req, _ctx, _cb) => {
    var _keys = ['list of your file keys in s3'];

    // Download every object body up front. Note the .promise() call:
    // getObject() by itself returns an AWS.Request, not a Promise
    var _list = await Promise.all(_keys.map(_key =>
        s3.getObject({ Bucket: 'bucket-name', Key: _key }).promise()
            .then(_data => ({ data: _data.Body, name: `${_key.split('/').pop()}` }))
    )).catch(_err => { throw new Error(_err); });

    await new Promise((_resolve, _reject) => {
        var _myStream = streamTo('bucket-name', 'fileName.zip');    // Now we instantiate that pipe...
        var _archive = _archiver('zip');
        _archive.on('error', _err => _reject(new Error(_err)));     // reject the promise rather than throwing inside the handler

        // The promise gets resolved when the fluid stops running, so that's when you get to close and resolve
        _myStream.on('close', _resolve);
        _myStream.on('end', _resolve);
        _myStream.on('error', _reject);

        _archive.pipe(_myStream);    // Pass that pipe to _archive so it can push the fluid straight down to the S3 bucket
        _list.forEach(_itm => _archive.append(_itm.data, { name: _itm.name }));    // And then we start adding files to it
        _archive.finalize();         // Tell it that's all we want to add; when it finishes, one of the events above resolves the promise
    }).catch(_err => { throw new Error(_err); });

    _cb(null, {});    // Handle response back to server
};
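
The answer above still downloads every object body into memory with Promise.all before appending it to the archive, so peak memory usage grows with the total size of the files. A further refinement, not part of the original answer, is to stream each object straight from S3 into archiver so only a small per-file buffer is held at any time. This is a minimal sketch under the same aws-sdk v2 and archiver packages; the bucket name and keys are placeholders:

'use strict';

const AWS = require('aws-sdk');
const archiver = require('archiver');
const { PassThrough } = require('stream');

const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

// Streams each object from S3 into the zip and streams the zip back to S3,
// keeping memory use roughly constant regardless of total file size.
exports.handler = async () => {
    const bucket = 'bucket-name';                  // placeholder bucket
    const keys = ['photos/a.jpg', 'videos/b.mp4']; // placeholder keys

    const pass = new PassThrough();
    const upload = s3.upload({ Bucket: bucket, Key: 'zipped/images.zip', Body: pass }).promise();

    const archive = archiver('zip');
    archive.on('error', err => pass.destroy(err)); // propagate archiver errors to the upload stream
    archive.pipe(pass);

    for (const key of keys) {
        // createReadStream() on the AWS.Request yields the body as a stream
        // instead of buffering it the way getObject(...).promise() does
        archive.append(s3.getObject({ Bucket: bucket, Key: key }).createReadStream(),
                       { name: key.split('/').pop() });
    }

    await archive.finalize(); // returns a promise in archiver v3+
    return upload;            // resolves when the S3 multipart upload completes
};

One caveat with this approach: a stream from createReadStream() does not retry on failure, so in production it is advisable to attach an 'error' handler to each object stream, or to verify the keys first with headObject, before appending them to the archive.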
