gpt4 book ai didi

node.js - 在 Node.js 上使用 formidable 和(knox 或 aws-sdk)将文件流式上传到 S3

转载 作者:IT老高 更新时间:2023-10-28 23:07:56 27 4
gpt4 key购买 nike

我正在尝试将通过表单提交的文件直接上传到 Amazon S3 存储桶,使用 aws-sdk 或 knox。表单处理由 formidable 完成。

我的问题是:如何将 formidable 与 aws-sdk(或 knox)处理流的最新功能正确结合使用?

我知道这个话题已经在这里以不同的方式提出过,即:

但是,我认为答案有点过时和/或偏离主题(即 CORS 支持,由于各种原因我现在不想使用它)和/或最重要的是,没有提及aws-sdk(参见:https://github.com/aws/aws-sdk-js/issues/13#issuecomment-16085442)或 knox(尤其是 putStream() 或其 readableStream.pipe(req) 变体 both explained in the doc)的最新功能。

经过几个小时的挣扎,我得出的结论是我需要一些帮助(免责声明:我是流的新手)。

HTML 表单:

<!-- Multipart form: posts the selected image file to the /uploadPicture handler below -->
<form action="/uploadPicture" method="post" enctype="multipart/form-data">
<input name="picture" type="file" accept="image/*">
<input type="submit">
</form>

Express bodyParser 中间件是这样配置的:

// defer: true stops Express from consuming the multipart body, so formidable can stream it
app.use(express.bodyParser({defer: true}))

POST 请求处理程序:

# Express POST handler: parse the multipart form ourselves so file parts
# can be streamed to S3 instead of being buffered to disk.
uploadPicture = (req, res, next) ->
form = new formidable.IncomingForm()
form.parse(req)

# Intercept each multipart part as it arrives on the wire
form.onPart = (part) ->
if not part.filename
# Let formidable handle all non-file parts (fields)
form.handlePart(part)
else
# File part: stream it to S3 ourselves.
# NOTE(review): bytesExpected is the size of the WHOLE request body,
# not of this single file part — confirm it is valid as ContentLength.
handlePart(part, form.bytesExpected)

# Streams one file part to S3 via aws-sdk putObject.
# NOTE(review): this is the FAILING variant the question is about — per the
# error below, S3 closes the idle socket with RequestTimeout (statusCode 400).
handlePart = (part, fileSize) ->
# aws-sdk version
params =
Bucket: "mybucket"
# object key taken from the uploaded file's original name
Key: part.filename
ContentLength: fileSize
Body: part # passing stream object as body parameter

awsS3client.putObject(params, (err, data) ->
if err
console.log err
else
console.log data
)

但是,我收到以下错误:

{ [RequestTimeout: Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.]

message: 'Your socket connection to the server was not read from or written to within the timeout period. Idle connections will be closed.', code: 'RequestTimeout', name: 'RequestTimeout', statusCode: 400, retryable: false }

按同样思路改写的 handlePart() 函数的 knox 版本也同样失败:

# knox variant: putStream with explicit Content-Length / Content-Type headers.
# NOTE(review): also fails — per the text below it returns a 400 response.
handlePart = (part, fileSize) ->
headers =
"Content-Length": fileSize
"Content-Type": part.mime
# knox signature: putStream(sourceStream, destinationKey, headers, callback)
knoxS3client.putStream(part, part.filename, headers, (err, res) ->
if err
console.log err
else
console.log res
)

我还在某处获得了一个带有 400 statusCode 的大 res 对象。

Region 在这两种情况下都配置为 eu-west-1

补充说明:

node 0.10.12

latest formidable from npm (1.0.14)

latest aws-sdk from npm (1.3.1)

latest knox from npm (0.8.3)

最佳答案

使用 AWS S3 的 multipartUpload(以 s3-upload-stream 作为工作模块)和 formidable 的 readable stream,您可以像下面这样通过管道(pipe)上传流:

// Streams browser uploads straight to S3: formidable exposes each file part
// as a readable stream, which is piped into s3-upload-stream (an S3 multipart
// upload wrapper) without ever buffering the file to disk.
var formidable = require('formidable');
var http = require('http');
var util = require('util');
var AWS = require('aws-sdk');
var config = require('./config');
// S3 client; credentials come from the local config module.
var s3 = new AWS.S3({
accessKeyId: config.get('S3_ACCESS_KEY'),
secretAccessKey: config.get('S3_SECRET_KEY'),
apiVersion: '2006-03-01'
});
// Wrap the client so uploads can be written as streams.
var s3Stream = require('s3-upload-stream')(s3);
var bucket = 'bucket-name';
var key = 'abcdefgh'; // NOTE(review): unused below — part.filename is used as the Key instead


http.createServer(function(req, res) {

if (req.url == '/upload' && req.method.toLowerCase() == 'post') {

var form = new formidable.IncomingForm();
form.on('progress', function(bytesReceived, bytesExpected) {
//console.log('onprogress', parseInt( 100 * bytesReceived / bytesExpected ), '%');
});

form.on('error', function(err) {
console.log('err',err);
});

// This 'end' is for the client to finish uploading
// upload.on('uploaded') is when the uploading is
// done on AWS S3
form.on('end', function() {
console.log('ended!!!!', arguments);
});

form.on('aborted', function() {
console.log('aborted', arguments);
});

// Overriding onPart prevents formidable's default disk write;
// each file part is a readable stream we pipe to S3 ourselves.
form.onPart = function(part) {
console.log('part',part);
// part looks like this
// {
// readable: true,
// headers:
// {
// 'content-disposition': 'form-data; name="upload"; filename="00video38.mp4"',
// 'content-type': 'video/mp4'
// },
// name: 'upload',
// filename: '00video38.mp4',
// mime: 'video/mp4',
// transferEncoding: 'binary',
// transferBuffer: ''
// }

var start = new Date().getTime();
// Writable stream backed by an S3 multipart upload.
var upload = s3Stream.upload({
"Bucket": bucket,
"Key": part.filename
});

// Optional configuration
//upload.maxPartSize(20971520); // 20 MB
upload.concurrentParts(5);

// Handle errors.
upload.on('error', function (error) {
console.log('errr',error);
});
// Fired once per completed S3 multipart chunk.
upload.on('part', function (details) {
console.log('part',details);
});
// Fired when the whole object has been assembled on S3.
upload.on('uploaded', function (details) {
var end = new Date().getTime();
console.log('it took',end-start);
console.log('uploaded',details);
});

// Maybe you could add compress like
// part.pipe(compress).pipe(upload)
part.pipe(upload);
};

// parse() drives the request; the callback fires when the CLIENT upload
// ends (the S3 upload may still be in flight — see 'uploaded' above).
form.parse(req, function(err, fields, files) {
res.writeHead(200, {'content-type': 'text/plain'});
res.write('received upload:\n\n');
res.end(util.inspect({fields: fields, files: files}));
});
return;
}

// show a file upload form
res.writeHead(200, {'content-type': 'text/html'});
res.end(
'<form action="/upload" enctype="multipart/form-data" method="post">'+
'<input type="text" name="title"><br>'+
'<input type="file" name="upload" multiple="multiple"><br>'+
'<input type="submit" value="Upload">'+
'</form>'
);
}).listen(8080);

关于node.js - 在 Node.js 上使用 formidable 和(knox 或 aws-sdk)将文件流式上传到 S3,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/17309559/

27 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com