From 6efe05572db81ff16d35a7417f8e3835354690ca Mon Sep 17 00:00:00 2001
From: Dave Hadka
Date: Fri, 8 May 2020 12:05:32 -0400
Subject: [PATCH] Test disabling concurrency

---
 dist/restore/index.js  | 30 ++++++++++++++++--------------
 dist/save/index.js     | 30 ++++++++++++++++--------------
 src/cacheHttpClient.ts |  8 ++++----
 3 files changed, 36 insertions(+), 32 deletions(-)

diff --git a/dist/restore/index.js b/dist/restore/index.js
index f0b155d..cf22bff 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -2403,20 +2403,22 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
-                while (offset < fileSize) {
-                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                    const start = offset;
-                    const end = offset + chunkSize - 1;
-                    offset += MAX_CHUNK_SIZE;
-                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                        fd,
-                        start,
-                        end,
-                        autoClose: false
-                    }), start, end);
-                }
-            })));
+            // await Promise.all(
+            //     parallelUploads.map(async () => {
+            while (offset < fileSize) {
+                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                const start = offset;
+                const end = offset + chunkSize - 1;
+                offset += MAX_CHUNK_SIZE;
+                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                    fd,
+                    start,
+                    end,
+                    autoClose: false
+                }), start, end);
+            }
+            // })
+            // );
         }
         finally {
             fs.closeSync(fd);
diff --git a/dist/save/index.js b/dist/save/index.js
index 6dd837c..4daea2a 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -2403,20 +2403,22 @@ function uploadFile(httpClient, cacheId, archivePath) {
         core.debug("Awaiting all uploads");
         let offset = 0;
         try {
-            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
-                while (offset < fileSize) {
-                    const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
-                    const start = offset;
-                    const end = offset + chunkSize - 1;
-                    offset += MAX_CHUNK_SIZE;
-                    yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
-                        fd,
-                        start,
-                        end,
-                        autoClose: false
-                    }), start, end);
-                }
-            })));
+            // await Promise.all(
+            //     parallelUploads.map(async () => {
+            while (offset < fileSize) {
+                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
+                const start = offset;
+                const end = offset + chunkSize - 1;
+                offset += MAX_CHUNK_SIZE;
+                yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, {
+                    fd,
+                    start,
+                    end,
+                    autoClose: false
+                }), start, end);
+            }
+            // })
+            // );
         }
         finally {
             fs.closeSync(fd);
diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts
index 7f06b6b..afa8e11 100644
--- a/src/cacheHttpClient.ts
+++ b/src/cacheHttpClient.ts
@@ -342,8 +342,8 @@ async function uploadFile(
     let offset = 0;
 
     try {
-        await Promise.all(
-            parallelUploads.map(async () => {
+// await Promise.all(
+//     parallelUploads.map(async () => {
                 while (offset < fileSize) {
                     const chunkSize = Math.min(
                         fileSize - offset,
@@ -367,8 +367,8 @@ async function uploadFile(
                         end
                     );
                 }
-            })
-        );
+// })
+// );
     } finally {
         fs.closeSync(fd);
    }
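
---

A note on what this patch changes: the code being commented out is a shared-cursor worker pool. `Promise.all(parallelUploads.map(...))` starts `concurrency` async workers that all drain a single `offset` cursor, and because each worker reads `start`/`end` and advances `offset` synchronously, before its first `await`, the single-threaded event loop guarantees no two workers claim the same byte range. With the wrapper commented out, the surviving `while` loop uploads chunks strictly one at a time. The sketch below is a minimal, self-contained TypeScript reproduction of the two modes for comparison; `uploadChunk` is a stub standing in for the real chunked upload request, the 32 MB chunk size is an assumption, and the names `uploadParallel` and `uploadSerial` are illustrative, not part of the actions/cache codebase.

// Minimal sketch comparing the two upload strategies touched by this patch.
// `uploadChunk` is a stub standing in for the real HTTP chunk upload.
const MAX_CHUNK_SIZE = 32 * 1024 * 1024; // assumed 32 MB, for illustration

async function uploadChunk(start: number, end: number): Promise<void> {
    await new Promise(resolve => setTimeout(resolve, 10)); // simulate network I/O
    console.log(`uploaded bytes ${start}-${end}`);
}

// Parallel mode (the code the patch comments out): `concurrency` workers share
// one `offset` cursor. Each worker claims its next chunk by reading and
// advancing `offset` synchronously, between awaits, so no two workers ever
// upload the same range.
async function uploadParallel(fileSize: number, concurrency = 4): Promise<void> {
    let offset = 0;
    const parallelUploads = [...new Array(concurrency).keys()];
    await Promise.all(
        parallelUploads.map(async () => {
            while (offset < fileSize) {
                const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
                const start = offset;
                const end = offset + chunkSize - 1;
                offset += MAX_CHUNK_SIZE;
                await uploadChunk(start, end);
            }
        })
    );
}

// Serial mode (what the patch leaves active): the same loop without the
// Promise.all wrapper, so chunks upload one at a time.
async function uploadSerial(fileSize: number): Promise<void> {
    let offset = 0;
    while (offset < fileSize) {
        const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
        const start = offset;
        const end = offset + chunkSize - 1;
        offset += MAX_CHUNK_SIZE;
        await uploadChunk(start, end);
    }
}

// Example: a 100 MB archive yields four chunks (the last one short).
uploadSerial(100 * 1024 * 1024).catch(console.error);

Serializing the uploads removes any interaction between concurrent range writes on the server side, which, going by the subject line, is what this test commit is probing.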