gpt4 book ai didi

node.js - 超出最大堆栈大小 - 事件流

转载 作者:太空宇宙 更新时间:2023-11-03 23:21:38 25 4
gpt4 key购买 nike

有这段代码:

/**
 * Runs a MongoDB find() with the given filter and streams the matching
 * documents (without _id) into /opt/<requestId>.txt as a JSON-style array,
 * then uploads the file via SFTP and publishes an MQTT notification.
 *
 * @param {string} url - MongoDB connection string.
 * @param {string} dbName - Database name.
 * @param {string} collection - Collection to query.
 * @param {Object} filter - MongoDB filter document.
 * @param {string} requestId - Used to name the output file.
 */
function query(url, dbName, collection, filter, requestId) {
  MongoClient.connect(url, { native_parser: true, authSource: 'admin' }, function (err, client) {
    if (err) {
      throw err;
    }
    const db = client.db(dbName);
    const stream = db.collection(collection).find(filter, { fields: { _id: 0 } }).stream();

    const fileName = '/opt/' + requestId + '.txt';
    const writer = fs.createWriteStream(fileName);
    writer.write('[\n');

    // NOTE(review): writing the closing ']' on the cursor's 'end' event races
    // with the pipe chain still flushing into `writer` — confirm ordering.
    stream.on('end', function () {
      writer.write('\n]');
    });

    stream
      .pipe(es.map(function (doc, next) {
        next(null, JSON.stringify(doc));
      }))
      .pipe(es.join(',\n'))
      .pipe(writer)
      .on('close', function () {
        // BUG FIX: the original did `.then(logger.info('...'))`, which calls
        // logger.info immediately and passes its return value to .then();
        // wrap it in a callback so it runs only after the upload resolves,
        // and add a .catch so a failed upload is not silently swallowed.
        sftp.put(fileName, '/opt/' + requestId + '.txt')
          .then(function () {
            logger.info('Done uploading the file via SFTP');
          })
          .catch(function (uploadErr) {
            logger.error('SFTP upload failed', uploadErr);
          });

        // NOTE(review): 'requestId' in this message is likely meant to be
        // interpolated — confirm with the author before changing the string.
        mqttClient.publish('response', 'The CSV for requestId has been uploaded FTP');

        // BUG FIX: release the MongoDB connection once the file is written.
        client.close();
      });
  });
}

问题是,当查询返回大量文档时,该函数将失败

/node_modules/map-stream/index.js:103
throw err
^

RangeError: Maximum call stack size exceeded
at Stream.ondata (internal/streams/legacy.js:14:18)
at emitOne (events.js:116:13)
at Stream.emit (events.js:211:7)
at Stream.<anonymous> (/node_modules/event-stream/index.js:298:12)
at Stream.stream.write (/node_modules/through/index.js:26:11)
at Stream.ondata (internal/streams/legacy.js:16:26)
at emitOne (events.js:116:13)
at Stream.emit (events.js:211:7)
at queueData (/node_modules/map-stream/index.js:43:21)
at next (/node_modules/map-stream/index.js:71:7)
at /node_modules/map-stream/index.js:85:7
at /opt/subscriber.js:84:7
at wrappedMapper (/node_modules/map-stream/index.js:84:19)
at Stream.stream.write (/node_modules/map-stream/index.js:96:21)
at Cursor.ondata (_stream_readable.js:639:20)
at emitOne (events.js:116:13)

这个函数的作用是获取一个过滤器,根据过滤器运行 mongodb 查询,并将结果文档写入一个文件,然后通过 ftp 进行传输。

函数在 next(null, doc); 处失败

关于如何改进代码而不必增加调用堆栈大小的任何建议?

最佳答案

虽然它看起来很受欢迎,但我从未使用过这个库。您可以尝试直接使用 stream 的事件来进行操作,看看它是否有效吗?

/**
 * Runs a MongoDB find() with the given filter and streams the matching
 * documents (without _id) into /opt/<requestId>.txt as a JSON array using
 * plain 'data' events (avoids the event-stream recursion that caused the
 * "Maximum call stack size exceeded" error), then uploads the file via
 * SFTP and publishes an MQTT notification.
 *
 * @param {string} url - MongoDB connection string.
 * @param {string} dbName - Database name.
 * @param {string} collection - Collection to query.
 * @param {Object} filter - MongoDB filter document.
 * @param {string} requestId - Used to name the output file.
 */
function query(url, dbName, collection, filter, requestId) {
  MongoClient.connect(url, { native_parser: true, authSource: 'admin' }, function (err, client) {
    if (err) {
      throw err;
    }
    const db = client.db(dbName);
    const stream = db.collection(collection).find(filter, { fields: { _id: 0 } }).stream();

    const fileName = '/opt/' + requestId + '.txt';
    const writer = fs.createWriteStream(fileName);
    writer.write('[\n');

    // BUG FIX: the original wrote documents with no separator, so the output
    // between '[' and ']' was not a valid JSON array. Prefix every document
    // after the first with ',\n' (matching the question's es.join(',\n')).
    let first = true;
    stream.on('data', function (doc) {
      const separator = first ? '' : ',\n';
      first = false;
      // NOTE(review): writer.write()'s return value (backpressure) is
      // ignored; consider pausing the cursor when it returns false.
      writer.write(separator + JSON.stringify(doc));
    });

    // BUG FIX: surface query/stream failures instead of leaking the
    // connection and leaving a truncated file behind.
    stream.on('error', function (streamErr) {
      logger.error('Query stream failed', streamErr);
      writer.destroy();
      client.close();
    });

    stream.on('end', function () {
      // BUG FIX: the original called sftp.put() right after writer.write(),
      // while data could still be buffered in the write stream. End the
      // writer and upload only once the file is fully flushed to disk.
      writer.end('\n]', function () {
        // BUG FIX: `.then(logger.info('...'))` invoked logger.info
        // immediately; wrap it in a callback and handle rejection.
        sftp.put(fileName, '/opt/' + requestId + '.txt')
          .then(function () {
            logger.info('Done uploading the file via SFTP');
          })
          .catch(function (uploadErr) {
            logger.error('SFTP upload failed', uploadErr);
          });

        // NOTE(review): 'requestId' in this message is likely meant to be
        // interpolated — confirm with the author before changing the string.
        mqttClient.publish('response', 'The CSV for requestId has been uploaded FTP');

        // BUG FIX: release the MongoDB connection when done.
        client.close();
      });
    });
  });
}

关于node.js - 超出最大堆栈大小 - 事件流,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/49023745/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com