
node.js - Creating a zip file on S3 from files on S3 using Lambda (Node)


I need to create a zip file containing a collection of files (videos and images) that live in my S3 bucket.

The problem with my current code (below) is that I quickly hit Lambda's memory limit.

async.eachLimit(files, 10, function(file, next) {
  var params = {
    Bucket: bucket, // bucket name
    Key: file.key
  };
  s3.getObject(params, function(err, data) {
    if (err) {
      console.log('file', file.key);
      console.log('get image files err', err, err.stack); // an error occurred
      next(err); // propagate the error so eachLimit stops
    } else {
      console.log('file', file.key);
      zip.file(file.key, data.Body);
      next();
    }
  });
},
function(err) {
  if (err) {
    console.log('err', err);
  } else {
    console.log('zip', zip);
    var content = zip.generateNodeStream({
      type: 'nodebuffer',
      streamFiles: true
    });
    var params = {
      Bucket: bucket, // name of dest bucket
      Key: 'zipped/images.zip',
      Body: content
    };
    s3.upload(params, function(err, data) {
      if (err) {
        console.log('upload zip to s3 err', err, err.stack); // an error occurred
      } else {
        console.log(data); // successful response
      }
    });
  }
});
  • Is this possible with Lambda, or should I be looking at a different approach?

  • Is it possible to write the compressed zip file on the fly, eliminating the memory problem to some extent, or do I need to collect all the files before compressing?

Any help would be greatly appreciated.

Best Answer

OK, I had to do this today and it works. Direct Buffer to Stream, no disk involved, so neither the memory limit nor the disk limit is an issue here:

'use strict';

const AWS = require("aws-sdk");
AWS.config.update( { region: "eu-west-1" } );
const s3 = new AWS.S3( { apiVersion: '2006-03-01' } );

const _archiver = require('archiver');

// This returns us a stream... consider it a real pipe sending fluid to the S3 bucket. Don't forget it.
const streamTo = (_bucket, _key) => {
  var stream = require('stream');
  var _pass = new stream.PassThrough();
  s3.upload( { Bucket: _bucket, Key: _key, Body: _pass }, (_err, _data) => { /*...Handle Errors Here*/ } );
  return _pass;
};

exports.handler = async (_req, _ctx, _cb) => {
  var _keys = ['list of your file keys in s3'];

  var _list = await Promise.all(_keys.map(_key => new Promise((_resolve, _reject) => {
    s3.getObject({ Bucket: 'bucket-name', Key: _key }).promise() // .promise() is needed; getObject alone returns an AWS.Request, not a thenable
      .then(_data => _resolve( { data: _data.Body, name: `${_key.split('/').pop()}` } ))
      .catch(_reject); // reject so Promise.all surfaces download failures
  }))).catch(_err => { throw new Error(_err) } );

  await new Promise((_resolve, _reject) => {
    var _myStream = streamTo('bucket-name', 'fileName.zip'); // Now we instantiate that pipe...
    var _archive = _archiver('zip');
    _archive.on('error', _reject); // reject the promise instead of throwing inside the event handler

    // Your promise gets resolved when the fluid stops running... so that's when you get to close and resolve
    _myStream.on('close', _resolve);
    _myStream.on('end', _resolve);
    _myStream.on('error', _reject);

    _archive.pipe(_myStream); // Pass that pipe to _archive so it can push the fluid straight down to the S3 bucket
    _list.forEach(_itm => _archive.append(_itm.data, { name: _itm.name } ) ); // And then we start adding files to it
    _archive.finalize(); // Tell it that's all we want to add; when it finishes, the promise resolves via one of the events above
  }).catch(_err => { throw new Error(_err) } );

  _cb(null, { } ); // Handle response back to server
};
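
Note that the approach above still reads every object fully into memory before zipping (the Promise.all collects all the Bodys first), so a very large set of files can still hit the memory limit that the question asks about. Since archiver's append() also accepts readable streams, a variation can pipe each object's read stream straight into the archive, keeping only a small buffer per file in memory. Below is a minimal sketch of that idea; the bucket and key names are placeholders, and it assumes the AWS SDK v2 (s3.getObject(...).createReadStream()) and an archiver version where finalize() returns a promise:

'use strict';

const AWS = require('aws-sdk');
const archiver = require('archiver');
const { PassThrough } = require('stream');

const s3 = new AWS.S3({ apiVersion: '2006-03-01' });

exports.handler = async () => {
  const bucket = 'bucket-name';                   // placeholder
  const keys = ['videos/a.mp4', 'images/b.jpg'];  // placeholder keys

  // Same "pipe to S3" trick as above: the upload consumes whatever the archive writes
  const pass = new PassThrough();
  const upload = s3.upload({ Bucket: bucket, Key: 'zipped/images.zip', Body: pass }).promise();

  const archive = archiver('zip');
  archive.pipe(pass);

  // Append each object's read stream instead of its buffered Body;
  // archiver drains the streams one after another, so peak memory stays small
  for (const key of keys) {
    archive.append(
      s3.getObject({ Bucket: bucket, Key: key }).createReadStream(),
      { name: key.split('/').pop() }
    );
  }

  await archive.finalize(); // resolves once every entry has been written
  await upload;             // wait for S3 to finish receiving the zip
  return {};
};

The trade-off is that the downloads are no longer parallel: archiver reads the appended streams sequentially. In exchange, peak memory is roughly one stream buffer per file rather than the sum of all the file sizes.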

Regarding node.js - Creating a zip file on S3 from files on S3 using Lambda (Node), we found a similar question on Stack Overflow: https://stackoverflow.com/questions/38633577/
