Add request header and debug statements

parent 21dc9a47e6
commit 994e3b75fc
@@ -1579,7 +1579,7 @@ function reserveCache(key) {
         const reserveCacheRequest = {
             key
         };
-        const response = yield restClient.create("caches", reserveCacheRequest);
+        const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions());
         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
     });
 }
@@ -1600,6 +1600,7 @@ function bufferToStream(buffer) {
 }
 function uploadChunk(restClient, resourceUrl, data, offset) {
     return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Uploading chunk of size ${data.byteLength} bytes at offset ${offset}`);
         const requestOptions = getRequestOptions();
         requestOptions.additionalHeaders = {
             "Content-Type": "application/octet-stream",
@@ -1619,6 +1620,7 @@ function commitCache(restClient, cacheId, filesize) {
 function saveCache(cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const restClient = createRestClient();
+        core.debug("Uploading chunks");
         // Upload Chunks
         const stream = fs.createReadStream(archivePath);
         let streamIsClosed = false;
@@ -1633,11 +1635,13 @@ function saveCache(cacheId, archivePath) {
             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
             offset += MAX_CHUNK_SIZE;
         }
+        core.debug("Awaiting all uplaods");
         const responses = yield Promise.all(uploads);
         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
+        core.debug("Commiting cache");
         // Commit Cache
         const cacheSize = utils.getArchiveFileSize(archivePath);
         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize);
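The functional change in reserveCache is the third argument to restClient.create: the request now carries the options built by getRequestOptions(). That helper is not shown in this diff; the sketch below shows what it plausibly returns — an Accept header that pins the cache service's API version. The media type and api-version string are illustrative assumptions, not values confirmed by this commit.

import { IRequestOptions } from "typed-rest-client/Interfaces";

// Sketch only: getRequestOptions() lives outside this diff; the
// api-version value below is an assumption used for illustration.
function createAcceptHeader(type: string, apiVersion: string): string {
    return `${type};api-version=${apiVersion}`;
}

function getRequestOptions(): IRequestOptions {
    return {
        // Ask the cache service for JSON and pin an API version.
        acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
    };
}

With options like these, every call that passes getRequestOptions() sends the same Accept header, which is why the identical one-line change appears in both compiled bundles and in the TypeScript source further down.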
@@ -1579,7 +1579,7 @@ function reserveCache(key) {
         const reserveCacheRequest = {
             key
         };
-        const response = yield restClient.create("caches", reserveCacheRequest);
+        const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions());
         return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
     });
 }
@@ -1600,6 +1600,7 @@ function bufferToStream(buffer) {
 }
 function uploadChunk(restClient, resourceUrl, data, offset) {
     return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Uploading chunk of size ${data.byteLength} bytes at offset ${offset}`);
         const requestOptions = getRequestOptions();
         requestOptions.additionalHeaders = {
             "Content-Type": "application/octet-stream",
@@ -1619,6 +1620,7 @@ function commitCache(restClient, cacheId, filesize) {
 function saveCache(cacheId, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
         const restClient = createRestClient();
+        core.debug("Uploading chunks");
         // Upload Chunks
         const stream = fs.createReadStream(archivePath);
         let streamIsClosed = false;
@@ -1633,11 +1635,13 @@ function saveCache(cacheId, archivePath) {
             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset));
             offset += MAX_CHUNK_SIZE;
         }
+        core.debug("Awaiting all uplaods");
         const responses = yield Promise.all(uploads);
         const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
         if (failedResponse) {
             throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
         }
+        core.debug("Commiting cache");
         // Commit Cache
         const cacheSize = utils.getArchiveFileSize(archivePath);
         const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize);
@@ -2968,11 +2972,13 @@ function run() {
                core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
                return;
            }
+            core.debug("Reserving Cache");
            const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
            if (cacheId < 0) {
                core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
                return;
            }
+            core.debug(`Cache ID: ${cacheId}`);
            const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true }));
            core.debug(`Cache Path: ${cachePath}`);
            const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
@@ -3001,6 +3007,7 @@ function run() {
                utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024 * 1024))} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`);
                return;
            }
+            core.debug("Saving Cache");
            yield cacheHttpClient.saveCache(cacheId, archivePath);
        }
        catch (error) {
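The debug lines added above bracket the existing phases of saveCache; the failure check between them relies on an isSuccessStatusCode helper that the diff references but does not show. A minimal sketch, assuming it simply tests for a 2xx status:

// Sketch only: helper assumed from its usage in the hunk above.
function isSuccessStatusCode(statusCode: number): boolean {
    return statusCode >= 200 && statusCode < 300;
}

Promise.all resolves with every chunk's response, so a single non-2xx status is enough for the find() call to return a value and abort the save with the new error message.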
@@ -120,7 +120,8 @@ export async function reserveCache(
     };
     const response = await restClient.create<ReserverCacheResponse>(
         "caches",
-        reserveCacheRequest
+        reserveCacheRequest,
+        getRequestOptions()
     );

     return response?.result?.cacheId ?? -1;
@@ -149,6 +150,7 @@ async function uploadChunk(
     data: Buffer,
     offset: number
 ): Promise<IRestResponse<void>> {
+    core.debug(`Uploading chunk of size ${data.byteLength} bytes at offset ${offset}`);
     const requestOptions = getRequestOptions();
     requestOptions.additionalHeaders = {
         "Content-Type": "application/octet-stream",
@@ -179,6 +181,7 @@ export async function saveCache(
 ): Promise<void> {
     const restClient = createRestClient();

+    core.debug("Uploading chunks");
     // Upload Chunks
     const stream = fs.createReadStream(archivePath);
     let streamIsClosed = false;
@@ -195,6 +198,7 @@ export async function saveCache(
         offset += MAX_CHUNK_SIZE;
     }

+    core.debug("Awaiting all uplaods");
     const responses = await Promise.all(uploads);

     const failedResponse = responses.find(
@@ -206,6 +210,7 @@ export async function saveCache(
         );
     }

+    core.debug("Commiting cache");
     // Commit Cache
     const cacheSize = utils.getArchiveFileSize(archivePath);
     const commitCacheResponse = await commitCache(
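Both uploadChunk hunks cut off immediately after the Content-Type header, so the rest of the request is not visible here. The sketch below fills in one plausible shape — a Content-Range header plus a PATCH via uploadStream — purely for context; everything past the Content-Type line is an assumption, not part of this commit. bufferToStream is only named in the hunk header above, so a minimal assumed version is included to keep the sketch self-contained, and getRequestOptions() is the helper sketched earlier.

import * as core from "@actions/core";
import { Duplex } from "stream";
import { RestClient, IRestResponse } from "typed-rest-client/RestClient";

// Assumed helper: wrap a Buffer in a readable stream for the upload.
function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
    const stream = new Duplex();
    stream.push(buffer);
    stream.push(null);
    return stream;
}

// Sketch of a complete chunk upload. The Content-Range header and the
// PATCH verb are assumptions about the cache service, not from this diff.
async function uploadChunkSketch(
    restClient: RestClient,
    resourceUrl: string,
    data: Buffer,
    offset: number
): Promise<IRestResponse<void>> {
    core.debug(`Uploading chunk of size ${data.byteLength} bytes at offset ${offset}`);
    const requestOptions = getRequestOptions();
    requestOptions.additionalHeaders = {
        "Content-Type": "application/octet-stream",
        // Assumed: tell the service where this chunk lands within the archive.
        "Content-Range": `bytes ${offset}-${offset + data.byteLength - 1}/*`
    };

    return await restClient.uploadStream<void>(
        "PATCH",
        resourceUrl,
        bufferToStream(data),
        requestOptions
    );
}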
@@ -35,12 +35,13 @@ async function run(): Promise<void> {
             return;
         }

+        core.debug("Reserving Cache");
         const cacheId = await cacheHttpClient.reserveCache(primaryKey);
         if (cacheId < 0) {
             core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
             return;
         }
-
+        core.debug(`Cache ID: ${cacheId}`);
         const cachePath = utils.resolvePath(
             core.getInput(Inputs.Path, { required: true })
         );
@@ -83,6 +84,7 @@ async function run(): Promise<void> {
             return;
         }

+        core.debug("Saving Cache");
         await cacheHttpClient.saveCache(cacheId, archivePath);
     } catch (error) {
         utils.logWarning(error.message);
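After this change the save path logs each phase — reserving the cache entry, uploading chunks, awaiting uploads, committing — via core.debug, so the messages only show up in the job log when step debug logging is enabled for the run. A condensed sketch of the resulting flow, assuming the surrounding run() wiring (key lookup, archive creation, size check) stays as in the rest of the file:

import * as core from "@actions/core";
import * as cacheHttpClient from "./cacheHttpClient"; // assumed module path

// Condensed sketch: the primaryKey/archivePath plumbing from run() is
// assumed, not shown here; cacheHttpClient is the module patched above.
async function saveFlow(primaryKey: string, archivePath: string): Promise<void> {
    core.debug("Reserving Cache");
    const cacheId = await cacheHttpClient.reserveCache(primaryKey);
    if (cacheId < 0) {
        core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
        return;
    }
    core.debug(`Cache ID: ${cacheId}`);

    core.debug("Saving Cache");
    await cacheHttpClient.saveCache(cacheId, archivePath);
}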