gpt4 book ai didi

node.js - GCF "No Such Object"当相关对象刚刚创建时

转载 作者:太空宇宙 更新时间:2023-11-04 01:28:34 24 4
gpt4 key购买 nike

我正在设置一个 Google Cloud Functions (GCF) 函数,该函数被频繁触发,以至于有多个实例同时运行。

我从 readStream 中收到错误,该流的源文件不存在,但此时在我的程序中我实际上刚刚创建了它。

我已通过 console.log() 打印文件对象的 JSON,确认该文件在流开始之前就已存在。我还通过 await 确保我要访问的文件已由先前的流完成写入,但仍然无济于事。

编辑:代码现在包含整个脚本。似乎引发错误的部分是函数columnDelete()。

var parse = require('fast-csv');
var Storage = require('@google-cloud/storage');
var Transform = require('readable-stream').Transform;

var storage = new Storage();
var bucket = storage.bucket('<BUCKET>');
const DMSs = ['PBS','CDK','One_Eighty','InfoBahn'];


/**
 * Object-mode Transform that drops the first two columns of each parsed
 * CSV row and re-emits the remainder as a comma-joined line ending in '\n'.
 */
class DeleteColumns extends Transform{
    constructor(){
        super({objectMode:true})
    }

    /**
     * @param {Array} row - one parsed CSV row (array of cell values)
     * @param {string} enc - encoding, ignored in object mode
     * @param {Function} done - callback signalling the row is handled
     */
    _transform(row, enc, done){
        // slice(2) keeps everything after the first two columns and, unlike
        // the previous `new Array(row.length - 2)` copy loop, does not throw
        // a RangeError when a row has fewer than two cells (it yields []).
        // join(',') matches Array.prototype.toString for string cells.
        this.push(row.slice(2).join(',') + '\n');
        done();
    }
}


/**
 * Reads the first data row of `originalFile` to extract the dealer and date,
 * then copies the file to '<dealer> <date>_<DMS>temp.csv' plus a non-temp
 * twin. Resolves with the new temp name, or 'Not Renamed' when this
 * invocation was itself triggered by a rename.
 *
 * FIX: File.copy() is asynchronous. The original resolved before the copies
 * finished, so the follow-up columnDelete() raced the copy and failed with
 * "No such object". We now wait for BOTH copies to complete before resolving.
 *
 * @param {object} file - GCF event payload (only .name is used)
 * @param {File} originalFile - GCS File object for the uploaded file
 * @param {string} DMS - database identifier embedded in the new name
 * @returns {Promise<string>} the temp file name, or 'Not Renamed'
 */
function rename(file, originalFile, DMS){
    return new Promise((resolve, reject) => {
        var dealer;
        var date;
        var header = true;
        var parser = parse({delimiter : ",", quote:'\\'});

        //for each row of data
        var stream = originalFile.createReadStream();
        stream.pipe(parser)
            // surface parse errors to the caller instead of crashing the function
            .on('error', reject)
            .on('data', (row)=>{
                //if this is the first line do nothing
                if(header){
                    header = false;
                }
                //otherwise record the contents of the first two columns and stop reading
                else {
                    dealer = row[0].toString().replace('"', '').replace('"', '');
                    date = row[1].toString().replace('"', '').replace('"', '');
                    stream.end();
                }
            })
            .on('finish', function(){
                var newName = dealer + ' ' + date + '_' + DMS + 'temp.csv';
                //if this was not triggered by the renaming of a file
                if(!file.name.includes(dealer) && !file.name.includes(':')){
                    console.log('Renamed ' + file.name);
                    // copy() returns a promise; resolve only once both copies
                    // exist, otherwise downstream readers see "No such object".
                    Promise.all([
                        originalFile.copy(newName),
                        originalFile.copy(newName.replace('temp', ''))
                    ]).then(() => resolve(newName), reject);
                }else{
                    console.log('Oops, triggered by the rename');
                    resolve('Not Renamed');
                }
            });
    });
}
/**
 * Strips the first two columns from the temp CSV in the bucket and writes
 * the result to the matching final file (the temp name without 'temp'),
 * then deletes the temp file.
 *
 * FIXES over the original:
 *  - stream errors now reject the promise (the `reject` parameter was never
 *    used, so any failure crashed the function with status 'crash');
 *  - temp.delete() is chained before resolving, so the returned promise
 *    covers all pending work (GCF may kill unawaited background work);
 *  - removed a leftover debug dump of the File object.
 *
 * @param {string} fileName - name of the temp file in the bucket
 * @returns {Promise<void>}
 */
function columnDelete(fileName){
    return new Promise((resolve, reject) =>{
        console.log('Deleting Columns...');
        var parser = parse({delimiter : ",", quote:'\\'});
        var del = new DeleteColumns();
        var temp = bucket.file(fileName);
        var final = bucket.file(fileName.replace('temp', ''));

        //read the temp file, parse the csv, drop the first two columns,
        //and stream the result into the final file
        temp.createReadStream()
            .on('error', reject)
            .pipe(parser)
            .on('error', reject)
            .pipe(del)
            .pipe(final.createWriteStream())
            .on('error', reject)
            .on('finish', function(){
                console.log('Columns Deleted');
                // wait for the delete so the promise really covers all work
                temp.delete().then(() => resolve(), reject);
            });
    });
}

exports.triggerRename = async(data, context) => {
var DMS = 'Triple';
var file = data;
//if not a temporary file
if(!file.name.includes('temp')){

//create a new File object from the name of the data passed
const originalFile = bucket.file(file.name);

//identify which database this data is from
DMSs.forEach(function(database){
if(file.name.includes(database)){
DMS = database;
}
});
//rename the file
var tempName = await rename(file, originalFile, DMS);
//if it was renamed, delete the extra columns
if (!tempName.includes('Not Renamed')){
await columnDelete(tempName);
}

} else if(file.name.includes('undefined')){
console.log(file.name + ' is invalid. Deleted.');
bucket.file(file.name).delete();
}
else {
console.log( file.name + ' is a temporary file. Did not rename.');
}
};

我期望的输出如下:

Deleting Columns...
Columns Deleted

很好又简单,让我们知道它何时开始和结束。

但是,我得到的是这个:

Deleting Columns...
ApiError: No such object: <file> at at Object.parseHttpRespMessage(......)
finished with status: 'crash'

由于显而易见的原因,这不是想要的。我的下一个想法是确保该文件没有被脚本的另一个实例中途删除,但要做到这一点,我必须检查该文件是否正在被另一个流使用,据我所知,这是不可能的。

有什么想法吗?

最佳答案

当我创建文件时,我调用了异步函数copy()并继续,这意味着当尝试访问该文件时,它尚未完成复制。我不知道的是,文件对象是一个引用变量,实际上并不包含文件本身。复制文件时,指针存在,但指向未完成的文件。

因此,“没有这样的对象”。为了解决这个问题,我只是使用了一个回调来确保在访问文件之前复制已完成。

感谢 Doug Stevenson 让我了解指针!

关于node.js - GCF "No Such Object"当相关对象刚刚创建时,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/56761465/

24 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com