作者热门文章
- html - 出于某种原因,IE8 对我的 Sass 文件中继承的 html5 CSS 不友好?
- JMeter 在响应断言中使用 span 标签的问题
- html - 在 :hover and :active? 上具有不同效果的 CSS 动画
- html - 相对于居中的 html 内容固定的 CSS 重复背景?
目标
使用 Google Drive APIs Resumable URL 将文件下载并上传到纯内存中的 Google Drive。
挑战/问题
I want to buffer the file as it's being downloaded to memory (not the filesystem) and subsequently upload it to Google Drive. The Google Drive API requires chunks to be a minimum length of
256 * 1024, (262144 bytes)
.
The process should pass a chunk from the buffer to be uploaded. If the chunk errors, that buffer chunk is retried up to 3 times. If the chunk succeeds, that chunk from the buffer should be cleared, and the process should continue until complete.
filesystem
作为可读流的起点。
passthrough
与
highWaterMark
和第三方库,例如
request
,
gaxios
, 和
got
内置流/管道支持,但在过程的上传端无济于事。
piping
或
chunking
机制,是否带
buffer
或
pipeline
正确地流向上传过程直到完成,并以有效的方式处理进度和完成事件。
PUT
到谷歌提供的正确 URL 的 Content-Length
和 Content-Range
头,同时有足够的缓冲区空间来处理 3 次重试?.cork()
和 .uncork()
管理缓冲流的有效方法?Transform
与 highWaterMark
一起流和 pipeline
有效地管理缓冲区?例如...pipeline(
downloadStream,
transformStream,
uploadStream,
(err) => {
if (err) {
reject(err)
} else {
resolve(true)
}
}
)
下面是一个可视化模型和我想要完成的代码:
[====================]
File Length (20 MB)
[========== ]
Download (10 MB)
[====== ]
Buffer (e.g. 6 MB, size 12 MB)
[===]
Upload Chunk (3 MB) => Error? Retry from Buffer (max 3 times)
=> Success? Empty Buffer => Continue =>
[===]
Upload next Chunk (3 MB)
代码
/*
Assume resumable_drive_url was already obtained from Google API
with the proper access token, which already contains the
Content-Type and Content-Length in the session.
*/
// Question code: downloads `download_url` as a stream and pipes it into a
// PassThrough stream. The resumable upload to `resumable_drive_url` is the
// part being asked about and is not implemented here; `file_type` and
// `file_length` are therefore unused in this snippet.
transfer(download_url, resumable_drive_url, file_type, file_length) {
  return new Promise((resolve, reject) => {
    // Overall deadline: fail the whole transfer after 80 seconds.
    let timeout = setTimeout(() => {
      reject(new Error("Transfer timed out."))
    }, 80000)
    // Question #1: Should the passthrough stream
    // and .on events be declared here?
    // highWaterMark matches the Drive API minimum chunk size (256 KB).
    const passthrough = new stream.PassThrough({
      highWaterMark: 256 * 1024
    })
    passthrough.on("error", (error) => {
      console.error(`Upload failed: ${error.message}`)
      // NOTE(review): rejecting with `error.message` (a string) discards the
      // stack trace — rejecting with `error` itself would be preferable.
      reject(error.message)
    })
    passthrough.on("end", () => {
      // Download finished flowing through; the timeout is no longer needed.
      clearTimeout(timeout)
      resolve(true)
    })
    // Download file
    axios({
      method: 'get',
      url: download_url,
      responseType: 'stream',
      maxRedirects: 1
    }).then(result => {
      // QUESTION #2: How do we buffer the file from here
      // via axios.put to the resumable_url with the correct
      // header information Content-Range and Content-Length?
      // CURIOSITY #1: Do we pipe from here
      // to a passthrough stream that maintains a minimum buffer size?
      result.data.pipe(passthrough)
    }
    ).catch(error => {
      reject(error)
    })
  })
}
引用文献
最佳答案
我相信你的目标和现状如下。
stream.Transform
. main()
.
const axios = require("axios");
const stream = require("stream");
/**
 * Streams a file from `download_url` into memory and uploads it to Google
 * Drive through a resumable-upload session, one chunk at a time.
 *
 * Flow: GET the source as a stream -> POST to `resumable_drive_url` to open
 * an upload session (session URI arrives in the `Location` header) -> buffer
 * downloaded bytes until `chunkSize` is reached -> PUT the chunk with a
 * `Content-Range` header -> on 308 (chunk stored, more expected) advance the
 * offset and resume the download; the final chunk answers 2xx with the
 * created file's metadata, which resolves the promise.
 *
 * @param {string} download_url        Source file URL.
 * @param {string} resumable_drive_url Drive endpoint used to open the session.
 * @param {string} file_type           MIME type for the Drive file.
 * @param {number} file_length         Total byte length of the source file.
 * @param {string} accessToken         OAuth2 bearer token.
 * @param {string} filename            Name of the file created on Drive.
 * @param {number} chunkSize           Upload chunk size; must be a multiple
 *                                     of 256 KiB per the Drive API.
 * @returns {Promise<Object>} Resolves with the Drive file metadata; rejects
 *                            on download, session, or upload failure.
 */
function transfer(
  download_url,
  resumable_drive_url,
  file_type,
  file_length,
  accessToken,
  filename,
  chunkSize
) {
  const MAX_RETRIES = 3;

  return new Promise((resolve, reject) => {
    axios({
      method: "get",
      url: download_url,
      responseType: "stream",
      maxRedirects: 1,
    })
      .then((result) => {
        // Identity Transform: gives us a pausable tap on the download so the
        // flow can be held while a chunk upload is in flight.
        const streamTrans = new stream.Transform({
          transform(chunk, _, callback) {
            callback(null, chunk);
          },
        });

        // 1. Open the resumable-upload session.
        axios({
          method: "POST",
          url: resumable_drive_url,
          headers: {
            Authorization: `Bearer ${accessToken}`,
            "Content-Type": "application/json",
          },
          data: JSON.stringify({
            name: filename,
            mimeType: file_type,
          }),
        })
          .then(({ headers: { location } }) => {
            // 2. Upload the file chunk by chunk.
            let startByte = 0;
            let bufs = [];

            // PUT one chunk to the session URI, retrying up to MAX_RETRIES
            // times. `onAccepted` runs after a 308 so the caller can resume
            // the paused download stream.
            const uploadChunk = (dataChunk, onAccepted) => {
              let attempts = 0;
              const attempt = () => {
                console.log(
                  `Progress: from ${startByte} to ${
                    startByte + dataChunk.length - 1
                  } for ${file_length}`
                );
                axios({
                  method: "PUT",
                  url: location,
                  headers: {
                    "Content-Range": `bytes ${startByte}-${
                      startByte + dataChunk.length - 1
                    }/${file_length}`,
                  },
                  data: dataChunk,
                })
                  // A 2xx only arrives for the final chunk and carries the
                  // created file's metadata — that completes the transfer.
                  .then(({ data }) => resolve(data))
                  .catch((err) => {
                    // Drive signals "chunk stored, send more" with HTTP 308,
                    // which axios surfaces as an error.
                    // BUG FIX: guard `err.response` — a network-level error
                    // has no response and used to crash with a TypeError.
                    if (err.response && err.response.status === 308) {
                      startByte += dataChunk.length;
                      if (onAccepted) onAccepted();
                      return;
                    }
                    attempts++;
                    if (attempts >= MAX_RETRIES) {
                      // BUG FIX: the original rejected without returning and
                      // kept retrying forever after exhausting its retries.
                      reject(err);
                      return;
                    }
                    console.log("Retry");
                    attempt();
                  });
              };
              attempt();
            };

            result.data.pipe(streamTrans);

            streamTrans.on("data", (chunk) => {
              bufs.push(chunk);
              const buffered = Buffer.concat(bufs);
              if (buffered.length >= chunkSize) {
                const dataChunk = buffered.slice(0, chunkSize);
                bufs = [buffered.slice(chunkSize)]; // keep the remainder
                streamTrans.pause(); // hold the download while uploading
                uploadChunk(dataChunk, () => streamTrans.resume());
              }
            });

            streamTrans.on("end", () => {
              // 3. Upload whatever is left as the last (possibly short) chunk.
              const dataChunk = Buffer.concat(bufs);
              if (dataChunk.length > 0) {
                uploadChunk(dataChunk);
              }
            });

            streamTrans.on("error", (err) => reject(err));
          })
          .catch((err) => reject(err));
      })
      .catch((error) => {
        reject(error);
      });
  });
}
/**
 * Demo entry point: fill in the placeholder values, then run to stream a
 * download straight into a Google Drive resumable upload.
 */
function main() {
  const config = {
    download_url: "###",
    resumable_drive_url:
      "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable",
    file_type: "###", // mimeType of the downloaded data
    file_length: 12345, // byte size of the downloaded data
    accessToken: "###", // OAuth2 access token
    filename: "sample filename", // name of the file created on Google Drive
    // Chunk size for the resumable upload — 10 MB here as a sample; use a
    // multiple of 256 KB (256 x 1024 bytes).
    chunkSize: 10485760,
  };

  transfer(
    config.download_url,
    config.resumable_drive_url,
    config.file_type,
    config.file_length,
    config.accessToken,
    config.filename,
    config.chunkSize
  )
    .then((res) => console.log(res))
    .catch((err) => console.log(err));
}
main();
结果:
23558108
的文件大小运行时(这是一个示例数据),在控制台中得到以下结果。
Progress: from 0 to 10485759 for 23558108
Progress: from 10485760 to 20971519 for 23558108
Progress(last): from 20971520 to 23558107 for 23558108
{
kind: 'drive#file',
id: '###',
name: 'sample filename',
mimeType: '###'
}
笔记:
关于node.js - 将内存中的文件下载并上传到 Google Drive,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/65570556/
我是一名优秀的程序员,十分优秀!