From 25b1a139de71dcf5d84935ec190231991decef56 Mon Sep 17 00:00:00 2001
From: Dave Hadka
Date: Fri, 8 May 2020 15:59:00 -0400
Subject: [PATCH] Revert "Test disabling concurrency"

This reverts commit 6efe05572db81ff16d35a7417f8e3835354690ca.
---
 dist/restore/index.js  | 30 ++++++++++++++----------------
 dist/save/index.js     | 30 ++++++++++++++----------------
 src/cacheHttpClient.ts |  8 ++++----
 3 files changed, 32 insertions(+), 36 deletions(-)

diff --git a/dist/restore/index.js b/dist/restore/index.js
index cf22bff..f0b155d 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -2403,22 +2403,20 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            // await Promise.all(
-            //     parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                }), start, end);
-            }
-            // })
-            // );
+            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end);
+                }
+            })));
         }
         finally {
             fs.closeSync(fd);
diff --git a/dist/save/index.js b/dist/save/index.js
index 4daea2a..6dd837c 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -2403,22 +2403,20 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            // await Promise.all(
-            //     parallelUploads.map(async () => {
-            while (offset < fileSize) {
-                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += MAX_CHUNK_SIZE;
-                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                    fd,
-                    start,
-                    end,
-                    autoClose: false
-                }), start, end);
-            }
-            // })
-            // );
+            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
+                while (offset < fileSize) {
+                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                    const start = offset;
+                    const end = offset + chunkSize - 1;
+                    offset += MAX_CHUNK_SIZE;
+                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                        fd,
+                        start,
+                        end,
+                        autoClose: false
+                    }), start, end);
+                }
+            })));
         }
         finally {
             fs.closeSync(fd);
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index afa8e11..7f06b6b 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -342,8 +342,8 @@ async function uploadFile(
     let offset = 0;
     try {
-        // await Promise.all(
-        //     parallelUploads.map(async () => {
+        await Promise.all(
+            parallelUploads.map(async () => {
                 while (offset < fileSize) {
                     const chunkSize = Math.min(
                         fileSize - offset,
@@ -367,8 +367,8 @@ async function uploadFile(
                         start,
                         end
                     );
                 }
-        //     })
-        // );
+            })
+        );
     } finally {
         fs.closeSync(fd);
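
Note on the change (illustration, not part of the patch): the revert re-enables the upload fan-out that 6efe0557 had disabled. uploadFile opens the archive once, starts a fixed pool of async workers, and each worker claims the next byte range by bumping a shared offset before it awaits, so chunks upload concurrently without overlapping. The sketch below restates that pattern as a self-contained TypeScript program under stated assumptions: uploadChunk here is a stand-in that only drains the stream rather than the action's real HTTP client, and the 4 MB chunk size and 4-way concurrency are illustrative values, not the action's real constants.

    import * as fs from "fs";

    const MAX_CHUNK_SIZE = 4 * 1024 * 1024; // assumed 4 MB chunks for the sketch

    // Hypothetical uploader: drains the chunk stream. A real implementation
    // would send the bytes to the cache service with a Content-Range header.
    async function uploadChunk(
        openStream: () => fs.ReadStream,
        start: number,
        end: number
    ): Promise<void> {
        for await (const _ of openStream()) {
            // consume the chunk; stand-in for the HTTP request body
        }
        console.log(`uploaded bytes ${start}-${end}`);
    }

    async function uploadFile(archivePath: string, concurrency = 4): Promise<void> {
        const fileSize = fs.statSync(archivePath).size;
        const fd = fs.openSync(archivePath, "r");
        let offset = 0; // shared cursor across all workers

        try {
            // Each of the `concurrency` workers loops until the cursor passes EOF.
            await Promise.all(
                [...new Array(concurrency).keys()].map(async () => {
                    while (offset < fileSize) {
                        const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
                        const start = offset;
                        const end = offset + chunkSize - 1;
                        offset += MAX_CHUNK_SIZE; // claim the range before awaiting
                        await uploadChunk(
                            () =>
                                fs.createReadStream(archivePath, {
                                    fd,
                                    start,
                                    end,
                                    autoClose: false // keep fd open for other workers
                                }),
                            start,
                            end
                        );
                    }
                })
            );
        } finally {
            fs.closeSync(fd);
        }
    }

    // Example usage: uploadFile("./cache.tgz").catch(console.error);

The shared offset needs no lock: JavaScript runs these workers on one thread, and each worker reads and advances the cursor synchronously before its first await, so no two workers can claim the same chunk.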