path: root/editors/code/src/net.ts
author     bors[bot] <26634292+bors[bot]@users.noreply.github.com>  2020-06-22 18:25:03 +0100
committer  GitHub <[email protected]>  2020-06-22 18:25:03 +0100
commit     ceb69203b55d83aeaf4e58bff4a58f2f17d4087d (patch)
tree       0a2526a9ef912a1b3aa007f4f2072b2565026ed6 /editors/code/src/net.ts
parent     eabbeec14c6624fb93344c25ecd79fe61972abbc (diff)
parent     2d32e97cf94ce34e4cb3465c4f5de7b6574f54b5 (diff)
Merge #4989
4989: Hotfix skipping the first chunks of the artifacts r=matklad a=Veetaha

Quick hotfix. Fixes: #4986, #4987

The stream starts being consumed as soon as a handler for the `data` event is attached. When extracting `stream.pipeline()` under `withTempFile` in #4963, I didn't move the `data` handler into that scope as well, so the preliminary awaits on file-system operations allowed the first chunks of the file to be skipped.

Co-authored-by: Veetaha <[email protected]>
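For context, the behaviour behind the bug is plain Node.js stream semantics rather than anything rust-analyzer specific: attaching a "data" listener switches a Readable into flowing mode, so any chunks emitted while unrelated async work is still being awaited never reach a destination that is piped in later. A minimal, self-contained sketch of that pitfall (not rust-analyzer code; the PassThrough source and the timings are made up for illustration):

import * as stream from "stream";
import * as util from "util";

const pipeline = util.promisify(stream.pipeline);

async function main() {
    const source = new stream.PassThrough();

    // Simulate a response body that already has data in flight.
    source.write("first chunk|");
    setTimeout(() => {
        source.write("second chunk");
        source.end();
    }, 50);

    // Attaching this listener switches `source` into flowing mode,
    // so it starts emitting chunks right away.
    let readBytes = 0;
    source.on("data", (chunk: Buffer) => {
        readBytes += chunk.length;
    });

    // Unrelated async work (in net.ts: setting up the temp file) happens here;
    // "first chunk|" is emitted to the listener above in the meantime.
    await new Promise(resolve => setTimeout(resolve, 10));

    const received: Buffer[] = [];
    const dest = new stream.PassThrough();
    dest.on("data", (chunk: Buffer) => received.push(chunk));

    // The destination only sees what is emitted from now on.
    await pipeline(source, dest);

    // Prints "second chunk" -- the first chunk never reached `dest`,
    // even though the progress counter saw all of the bytes.
    console.log(Buffer.concat(received).toString(), `(readBytes = ${readBytes})`);
}

main().catch(err => { console.error(err); process.exit(1); });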
Diffstat (limited to 'editors/code/src/net.ts')
-rw-r--r--  editors/code/src/net.ts | 13
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/editors/code/src/net.ts b/editors/code/src/net.ts
index 0e7dd29c2..9debdc57b 100644
--- a/editors/code/src/net.ts
+++ b/editors/code/src/net.ts
@@ -114,15 +114,16 @@ async function downloadFile(
 
     log.debug("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath);
 
-    let readBytes = 0;
-    res.body.on("data", (chunk: Buffer) => {
-        readBytes += chunk.length;
-        onProgress(readBytes, totalBytes);
-    });
-
     // Put the artifact into a temporary folder to prevent partially downloaded files when user kills vscode
     await withTempFile(async tempFilePath => {
         const destFileStream = fs.createWriteStream(tempFilePath, { mode });
+
+        let readBytes = 0;
+        res.body.on("data", (chunk: Buffer) => {
+            readBytes += chunk.length;
+            onProgress(readBytes, totalBytes);
+        });
+
         await pipeline(res.body, destFileStream);
         await new Promise<void>(resolve => {
             destFileStream.on("close", resolve);
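The general rule the fix follows: wire up every consumer of the response body in one synchronous step, with no awaits in between, so nothing can be emitted while only the progress listener is attached. A hedged sketch of that pattern as a standalone helper (`saveWithProgress` is hypothetical and not part of net.ts):

import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";

const pipeline = util.promisify(stream.pipeline);

// Hypothetical helper: everything that consumes `body` is attached here,
// synchronously, immediately before piping -- mirroring the fixed ordering
// inside the withTempFile scope in the diff above.
async function saveWithProgress(
    body: NodeJS.ReadableStream,
    destFilePath: string,
    onProgress: (readBytes: number) => void,
): Promise<void> {
    const destFileStream = fs.createWriteStream(destFilePath);

    let readBytes = 0;
    body.on("data", (chunk: Buffer) => {
        readBytes += chunk.length;
        onProgress(readBytes);
    });

    // No awaits between attaching the listener and starting the pipeline,
    // so no chunk can slip past the file stream.
    await pipeline(body, destFileStream);
}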