Fix number parsing issues

This commit is contained in:
Josh Gross 2020-01-06 14:06:24 -05:00
parent 1da52de10f
commit c262ac0154
1 changed file with 10 additions and 2 deletions

@@ -191,6 +191,14 @@ async function uploadChunk(
     );
 }
 
+function parseEnvNumber(key: string): number | undefined {
+    const value = Number(process.env[key]);
+    if (Number.isNaN(value) || value < 0) {
+        return undefined;
+    }
+    return value;
+}
+
 async function uploadFile(
     restClient: RestClient,
     cacheId: number,
@@ -201,9 +209,9 @@ async function uploadFile(
     const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
     const fd = fs.openSync(archivePath, "r");
 
-    const concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]) ?? 4; // # of HTTP requests in parallel
+    const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
     const MAX_CHUNK_SIZE =
-        Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) ?? 32 * 1024 * 1024; // 32 MB Chunks
+        parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
     core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
 
     const parallelUploads = [...new Array(concurrency).keys()];
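
The underlying issue: Number(process.env[...]) returns NaN, not null or undefined, when the variable is unset or non-numeric, and NaN is not nullish, so the ?? 4 and ?? 32 * 1024 * 1024 defaults were never applied. The parseEnvNumber helper maps NaN (and negative values) to undefined so the nullish-coalescing default actually takes effect. A minimal sketch of the before/after behaviour; the helper is copied from the diff above, and the surrounding constants are for illustration only:

// Before: with CACHE_UPLOAD_CONCURRENCY unset, Number(undefined) is NaN;
// NaN is not nullish, so `?? 4` never fires and oldConcurrency ends up NaN.
const oldConcurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]) ?? 4;

// After: the helper from this commit converts NaN and negative values to
// undefined, so the nullish-coalescing default applies as intended.
function parseEnvNumber(key: string): number | undefined {
    const value = Number(process.env[key]);
    if (Number.isNaN(value) || value < 0) {
        return undefined;
    }
    return value;
}

const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // 4 when unset or invalid
const MAX_CHUNK_SIZE = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB when unset or invalid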