gpt4 book ai didi

node.js - 使用 NodeJS 将多个文件上传到 AWS S3

转载 作者:搜寻专家 更新时间:2023-10-31 23:01:30 26 4
gpt4 key购买 nike

我正在尝试使用 NodeJS 将目录中的所有文件上传到我的 S3 存储桶。如果我明确地为 Key: 字段提供文件路径 + 文字字符串,我就可以一次上传一个文件。

下面是我正在使用的脚本:

var AWS = require('aws-sdk'),
fs = require('fs');

// For dev purposes only -- never commit real credentials; use env vars or IAM roles.
AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });

// Regex matching the files to upload (*.txt).
var re = /\.txt$/;

// ensure that this file is in the directory of the files you want to run the cronjob on
fs.readdir(".", function(err, files) {
if (err) {
console.log( "Could not list the directory.", err)
process.exit( 1 )
}


var matches = files.filter( function(text) { return re.test(text) } )
console.log("These are the files you have", matches)
var numFiles = matches.length


if ( numFiles ) {
// Read in the file, convert it to base64, store to S3

// BUG (the subject of this question): `i` is an implicit global and
// `fileName` is a function-scoped `var`, so by the time the async
// fs.readFile callbacks fire, every one of them sees the LAST value of
// `fileName` -- which is why only test2.txt gets uploaded (twice).
for( i = 0; i < numFiles; i++ ) {
var fileName = matches[i]

fs.readFile(fileName, function (err, data) {
if (err) { throw err }

// NOTE(review): fs.readFile already yields a Buffer, and `new Buffer(...)`
// is deprecated; despite the name, nothing here is base64-encoded.
var base64data = new Buffer(data, 'binary');


var s3 = new AWS.S3()
s3.putObject({
'Bucket': 'noonebetterhaventakenthisbucketnname',
'Key': fileName,
'Body': base64data,
'ACL': 'public-read'
}, function (resp) {
// NOTE(review): the aws-sdk callback signature is (err, data) -- the
// logged arguments below ('0': null, '1': { ETag }) show `resp` is
// actually the error slot, so failures would be silently mislabeled.
console.log(arguments);
console.log('Successfully uploaded, ', fileName)
})
})

}

}

})

它为每个试图上传到 S3 的文件产生这个错误:

These are the files you have [ 'test.txt', 'test2.txt' ]
{ '0': null,
'1': { ETag: '"2cad20c19a8eb9bb11a9f76527aec9bc"' } }
Successfully uploaded, test2.txt
{ '0': null,
'1': { ETag: '"2cad20c19a8eb9bb11a9f76527aec9bc"' } }
Successfully uploaded, test2.txt

编辑:已更新为在 Key 中使用变量 fileName,而不是直接引用 matches[i]。

为什么它只上传 test2.txt,我如何让它上传我的 matches 变量中的每个文件?

最佳答案

参考 Asynchronously reading and caching multiple files in nodejs 这个问题后得出了解决方案。

tl;dr 作用域问题——需要用闭包把变量包起来;可以通过为 fs.readFile 和 s3.putObject 创建一个函数,并在 for 循环中调用它来做到这一点。

const AWS = require('aws-sdk');
const fs = require('fs');

// For dev purposes only -- never commit real credentials; use env vars or IAM roles.
AWS.config.update({ accessKeyId: '...', secretAccessKey: '...' });

// One shared client is enough; no need to construct S3 per upload.
const s3 = new AWS.S3();

/**
 * Reads a local file and uploads it to S3 under its own name.
 *
 * Wrapping the read + upload in a function gives each call its own `file`
 * binding, so the async callbacks do not all capture the last loop value
 * (the closure/scoping bug from the question).
 *
 * @param {string} file - path of the file to read and use as the S3 key
 */
function read(file) {
  fs.readFile(file, function (err, data) {
    if (err) { throw err }

    // fs.readFile already yields a Buffer; `new Buffer(data, 'binary')` is
    // deprecated and redundant, so pass the Buffer straight through.
    s3.putObject({
      'Bucket': 'noonebetterhaventakenthisbucketnname',
      'Key': file,
      'Body': data,
      'ACL': 'public-read'
    }, function (err, resp) {
      // The aws-sdk callback signature is (err, data) -- the original
      // declared it as (resp), so the error slot was mistaken for the
      // response and failures were reported as successes.
      if (err) {
        console.error('Upload failed for', file, err);
        return;
      }
      console.log('Successfully uploaded, ', file, resp);
    });
  });
}

// Regex matching the files to upload (*.txt).
const re = /\.txt$/;

// ensure that this file is in the directory of the files you want to run the cronjob on
fs.readdir(".", function (err, files) {
  if (err) {
    console.log("Could not list the directory.", err);
    process.exit(1);
  }

  const matches = files.filter(function (text) { return re.test(text) });
  console.log("These are the files you have", matches);

  // for...of avoids the implicit-global `i` the original for loop created.
  for (const name of matches) {
    read(name);
  }
});

关于node.js - 使用 NodeJS 将多个文件上传到 AWS S3,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/43662686/

26 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com