Merge 351dc22c0cbc86a954171eeac892970ebaa232ea into 67b839edb68371cc5014f6cea11c9aa77238de78
Commit 697b810b85
BIN .licenses/npm/@octokit/action.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/auth-action.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/auth-token.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/core.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/endpoint.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/graphql.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/openapi-types.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/plugin-paginate-rest.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/plugin-rest-endpoint-methods.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/request-error.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/request.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@octokit/types.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/@types/node.dep.yml (generated, modified; binary file not shown)
BIN .licenses/npm/agent-base.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/before-after-hook.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/debug.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/deprecation.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/https-proxy-agent.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/is-plain-object.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/ms.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/once.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/universal-user-agent.dep.yml (generated, new file; binary file not shown)
BIN .licenses/npm/wrappy.dep.yml (generated, new file; binary file not shown)
@@ -16,6 +16,7 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac

### v3

* Added a workaround to allow updating/refreshing existing caches, via the `refresh-cache` option and requiring a valid GitHub API token.
* Added support for caching in GHES 3.5+.
* Fixed download issue for files > 2GB during restore.
* Updated the minimum runner version support from node 12 -> node 16.

@@ -53,10 +54,12 @@ If you are using a `self-hosted` Windows runner, `GNU tar` and `zstd` are requir
* `enableCrossOsArchive` - An optional boolean when enabled, allows Windows runners to save or restore caches that can be restored or saved respectively on other platforms. Default: `false`
* `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: `false`
* `lookup-only` - If true, only checks if cache entry exists and skips download. Does not change save cache behavior. Default: `false`
* `refresh-cache` - An optional boolean; when enabled, a matched key will be deleted after being restored, allowing it to be reused with refreshed/updated content. Default: `false`

#### Environment Variables

* `SEGMENT_DOWNLOAD_TIMEOUT_MINS` - Segment download timeout (in minutes, default `10`) to abort download of the segment if not completed in the defined number of minutes. [Read more](https://github.com/actions/cache/blob/main/tips-and-workarounds.md#cache-segment-restore-timeout)
* `GITHUB_TOKEN` - A GitHub API token, required for authenticating to the API when the `refresh-cache` option is enabled.
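
To make the contract concrete, the sketch below shows roughly how the option and token interact at save time (a condensed, hypothetical helper; `canRefreshCache` and the relative import paths are illustrative, and the full logic is in the `src/saveImpl.ts` hunk later in this diff):

```typescript
import { Inputs } from "./constants";
import * as utils from "./utils/actionUtils";

// Minimal sketch: `refresh-cache` only takes effect when both GITHUB_TOKEN and
// GITHUB_REPOSITORY are present; otherwise the save step warns and leaves the
// existing cache entry untouched.
function canRefreshCache(): boolean {
    const refreshCache = utils.getInputAsBool(Inputs.RefreshCache, { required: false });
    const { GITHUB_TOKEN, GITHUB_REPOSITORY } = process.env;
    if (refreshCache && !(GITHUB_TOKEN && GITHUB_REPOSITORY)) {
        utils.logWarning(
            `Can't refresh cache, either the repository info or a valid token are missing.`
        );
        return false;
    }
    return refreshCache;
}
```
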
### Outputs
@@ -1,5 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { RequestError } from "@octokit/request-error";
import nock from "nock";

import { Events, RefKey } from "../src/constants";
import * as actionUtils from "../src/utils/actionUtils";
@@ -9,14 +11,26 @@ jest.mock("@actions/core");
jest.mock("@actions/cache");

beforeAll(() => {
    nock.disableNetConnect();
    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });
    testUtils.mockServer.listen({
        onUnhandledRequest: "warn"
    });
});

afterEach(() => {
    delete process.env[Events.Key];
    delete process.env[RefKey];
    delete process.env["GITHUB_REPOSITORY"];
    delete process.env["GITHUB_TOKEN"];
    delete process.env["GITHUB_ACTION"];
});

afterAll(() => {
    testUtils.mockServer.close();
    nock.enableNetConnect();
});

test("isGhes returns true if server url is not github.com", () => {
@@ -194,6 +208,94 @@ test("getInputAsBool throws if required and value missing", () => {
    ).toThrowError();
});

test("deleteCacheByKey returns 'HttpError: 404' when cache is not found.", async () => {
    const event = Events.Push;

    process.env["GITHUB_REPOSITORY"] = "owner/repo";
    process.env["GITHUB_TOKEN"] =
        "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";
    process.env["GITHUB_ACTION"] = "__owner___run-repo";
    process.env[Events.Key] = event;
    process.env[RefKey] = "ref/heads/feature";
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const response = await actionUtils.deleteCacheByKey(
        testUtils.failureCacheKey,
        "owner",
        "repo"
    );
    expect(logWarningMock).toHaveBeenCalledWith(
        expect.stringMatching(/404: Not Found/i)
    );
    expect(response).toBeInstanceOf(RequestError);
    expect(response).toMatchObject({
        name: "HttpError",
        status: 404
    });
});

test("deleteCacheByKey returns 'HttpError: 401' on an invalid non-mocked request.", async () => {
    const event = Events.Push;

    process.env["GITHUB_REPOSITORY"] = "owner/repo";
    process.env["GITHUB_TOKEN"] =
        "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";
    process.env["GITHUB_ACTION"] = "__owner___run-repo";
    process.env[Events.Key] = event;
    process.env[RefKey] = "ref/heads/feature";
    await nock.enableNetConnect();
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const response = await actionUtils.deleteCacheByKey(
        testUtils.passThroughCacheKey,
        "owner",
        "repo"
    );
    expect(logWarningMock).toHaveBeenCalledWith(
        expect.stringMatching(/401: Bad Credentials/i)
    );
    expect(response).toBeInstanceOf(RequestError);
    expect(response).toMatchObject({
        name: "HttpError",
        status: 401
    });
    nock.disableNetConnect();
});

test("deleteCacheByKey returns matched cache data when successful.", async () => {
    const event = Events.Push;

    process.env["GITHUB_REPOSITORY"] = "owner/repo";
    process.env["GITHUB_TOKEN"] =
        "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";
    process.env["GITHUB_ACTION"] = "__owner___run-repo";
    process.env[Events.Key] = event;
    process.env[RefKey] = "ref/heads/feature";

    const expectedResponse = {
        id: expect.any(Number),
        ref: expect.any(String),
        key: expect.any(String),
        version: expect.any(String),
        last_accessed_at: expect.any(String),
        created_at: expect.any(String),
        size_in_bytes: expect.any(Number)
    };
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const response = await actionUtils.deleteCacheByKey(
        testUtils.successCacheKey,
        "owner",
        "repo"
    );
    expect(response).toMatchObject({
        data: expect.objectContaining({
            total_count: expect.any(Number),
            actions_caches: expect.arrayContaining([
                expect.objectContaining(expectedResponse)
            ])
        })
    });
    expect(logWarningMock).toHaveBeenCalledTimes(0);
});

test("isCacheFeatureAvailable for ac enabled", () => {
    jest.spyOn(cache, "isFeatureAvailable").mockImplementation(() => true);
@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";

import { Events, RefKey } from "../src/constants";
import run from "../src/restore";
@@ -9,6 +10,7 @@ import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    nock.disableNetConnect();
    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -53,6 +55,10 @@ afterEach(() => {
    delete process.env[RefKey];
});

afterAll(() => {
    nock.enableNetConnect();
});

test("restore with no cache found", async () => {
    const path = "node_modules";
    const key = "node-test";
@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";

import { Events, Inputs, RefKey } from "../src/constants";
import run from "../src/restoreImpl";
@@ -10,6 +11,7 @@ import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    nock.disableNetConnect();
    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -54,6 +56,10 @@ afterEach(() => {
    delete process.env[RefKey];
});

afterAll(() => {
    nock.enableNetConnect();
});

test("restore with invalid event outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");
@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";

import { Events, RefKey } from "../src/constants";
import run from "../src/restoreOnly";
@@ -9,6 +10,7 @@ import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    nock.disableNetConnect();
    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -54,6 +56,10 @@ afterEach(() => {
    delete process.env[RefKey];
});

afterAll(() => {
    nock.enableNetConnect();
});

test("restore with no cache found", async () => {
    const path = "node_modules";
    const key = "node-test";
@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";

import { Events, Inputs, RefKey } from "../src/constants";
import run from "../src/save";
@@ -11,6 +12,7 @@ jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    nock.disableNetConnect();
    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });
@@ -73,10 +75,14 @@ afterEach(() => {
    delete process.env[RefKey];
});

afterAll(() => {
    nock.enableNetConnect();
});

test("save with valid inputs uploads a cache", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
@@ -1,9 +1,10 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";

import { Events, Inputs, RefKey } from "../src/constants";
import run from "../src/saveImpl";
import { StateProvider } from "../src/stateProvider";
import { NullStateProvider, StateProvider } from "../src/stateProvider";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

@@ -12,6 +13,19 @@ jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    nock.disableNetConnect();
    testUtils.mockServer.listen({
        onUnhandledRequest: "warn"
    });

    jest.spyOn(actionUtils, "deleteCacheByKey").mockImplementation(
        (key: string, owner: string, repo: string) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .deleteCacheByKey(key, owner, repo);
        }
    );

    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });
@@ -52,6 +66,14 @@ beforeAll(() => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.isValidEvent();
    });

    jest.spyOn(actionUtils, "logWarning").mockImplementation(
        (message: string) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .logWarning(message);
        }
    );
});

beforeEach(() => {
@@ -69,6 +91,13 @@ afterEach(() => {
    testUtils.clearInputs();
    delete process.env[Events.Key];
    delete process.env[RefKey];
    delete process.env["GITHUB_TOKEN"];
    delete process.env["GITHUB_REPOSITORY"];
});

afterAll(() => {
    testUtils.mockServer.close();
    nock.enableNetConnect();
});

test("save with invalid event outputs warning", async () => {
@@ -88,7 +117,7 @@ test("save with no primary key in state outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = testUtils.successCacheKey;
    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
@@ -137,7 +166,7 @@ test("save on GHES with AC available", async () => {
    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
@@ -179,8 +208,10 @@ test("save on GHES with AC available", async () => {
test("save with exact match returns early", async () => {
    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    testUtils.setInput(Inputs.RefreshCache, "false");

    const primaryKey = testUtils.successCacheKey;

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = primaryKey;

    jest.spyOn(core, "getState")
@@ -207,7 +238,7 @@ test("save with missing input outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
@@ -235,7 +266,7 @@ test("save with large cache outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
@@ -280,7 +311,7 @@ test("save with reserve cache failure outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
@@ -327,7 +358,7 @@ test("save with server error outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
@@ -368,7 +399,7 @@ test("save with server error outputs warning", async () => {
test("save with valid inputs uploads a cache", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getState")
@@ -406,3 +437,171 @@ test("save with valid inputs uploads a cache", async () => {

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with cache hit and refresh-cache will try to delete and re-create entry", async () => {
    process.env["GITHUB_REPOSITORY"] = "owner/repo";
    process.env["GITHUB_TOKEN"] =
        "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";
    process.env["GITHUB_ACTION"] = "__owner___run-repo";

    const infoMock = jest.spyOn(core, "info");
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = primaryKey;

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.RefreshCache, "true");
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });
    await run(new StateProvider());

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        {
            uploadChunkSize: 4000000
        },
        false
    );

    expect(logWarningMock).toHaveBeenCalledTimes(0);
    expect(infoMock).toHaveBeenCalledTimes(3);

    expect(infoMock).toHaveBeenNthCalledWith(
        1,
        `Cache hit occurred on the primary key ${primaryKey}, attempting to refresh the contents of the cache.`
    );
    expect(infoMock).toHaveBeenNthCalledWith(
        2,
        `Successfully deleted cache with key: ${primaryKey}`
    );
    expect(infoMock).toHaveBeenNthCalledWith(
        3,
        `Cache saved with key: ${primaryKey}`
    );

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("Granular save will use lookup to determine if cache needs to be updated or (not) saved.", async () => {
    process.env["GITHUB_REPOSITORY"] = "owner/repo";
    process.env["GITHUB_TOKEN"] =
        "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";
    process.env["GITHUB_ACTION"] = "__owner___run-repo";

    const infoMock = jest.spyOn(core, "info");
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = testUtils.successCacheKey;

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Key, primaryKey);
    testUtils.setInput(Inputs.RefreshCache, "true");
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementation(() => {
            return Promise.resolve(primaryKey);
        });

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run(new NullStateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        [],
        {
            lookupOnly: true
        },
        false
    );

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        {
            uploadChunkSize: 4000000
        },
        false
    );

    expect(logWarningMock).toHaveBeenCalledTimes(0);
    expect(infoMock).toHaveBeenCalledTimes(3);

    expect(infoMock).toHaveBeenNthCalledWith(
        1,
        `Cache hit occurred on the primary key ${primaryKey}, attempting to refresh the contents of the cache.`
    );
    expect(infoMock).toHaveBeenNthCalledWith(
        2,
        `Successfully deleted cache with key: ${primaryKey}`
    );
    expect(infoMock).toHaveBeenNthCalledWith(
        3,
        `Cache saved with key: ${primaryKey}`
    );

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with cache hit and refresh-cache will throw a warning if there's no GITHUB_TOKEN", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = testUtils.successCacheKey;
    const savedCacheKey = primaryKey;

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.RefreshCache, "true");

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const saveCacheMock = jest.spyOn(cache, "saveCache");
    await run(new StateProvider());

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledWith(
        `Can't refresh cache, either the repository info or a valid token are missing.`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});
@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";

import { Events, Inputs, RefKey } from "../src/constants";
import run from "../src/saveOnly";
@@ -11,6 +12,7 @@ jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    nock.disableNetConnect();
    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });
@@ -73,6 +75,10 @@ afterEach(() => {
    delete process.env[RefKey];
});

afterAll(() => {
    nock.enableNetConnect();
});

test("save with valid inputs uploads a cache", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

@@ -105,6 +111,45 @@ test("save with valid inputs uploads a cache", async () => {
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("Granular save with refreshCache is able to save cache", async () => {
    process.env["GITHUB_REPOSITORY"] = "owner/repo";
    process.env["GITHUB_TOKEN"] =
        "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";
    process.env["GITHUB_ACTION"] = "__owner___run-repo";
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";

    const inputPath = "node_modules";
    process.env.CACHE_RESTORE_ONLY_MATCHED_KEY = primaryKey;
    testUtils.setInput(Inputs.Key, primaryKey);
    testUtils.setInput(Inputs.RefreshCache, "true");
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const cacheId = 4;

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith(
        [inputPath],
        primaryKey,
        {
            uploadChunkSize: 4000000
        },
        false
    );

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save failing logs the warning message", async () => {
    const warningMock = jest.spyOn(core, "warning");
@@ -26,6 +26,10 @@ inputs:
    description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
    default: 'false'
    required: false
  refresh-cache:
    description: 'An optional boolean, when enabled it will result in a matched key being deleted after being restored, allowing it to be reused with refreshed/updated content. Default: false'
    required: false
    default: 'false'
outputs:
  cache-hit:
    description: 'A boolean value to indicate an exact match was found for the primary key'
10597 dist/restore-only/index.js (vendored): file diff suppressed because one or more lines are too long
10429 dist/restore/index.js (vendored): file diff suppressed because one or more lines are too long
10463 dist/save-only/index.js (vendored): file diff suppressed because one or more lines are too long
10463 dist/save/index.js (vendored): file diff suppressed because one or more lines are too long
@@ -1,5 +1,3 @@
require("nock").disableNetConnect();

module.exports = {
    clearMocks: true,
    moduleFileExtensions: ["js", "ts"],
12232 package-lock.json (generated): file diff suppressed because it is too large
@@ -26,7 +26,8 @@
    "@actions/cache": "^3.2.1",
    "@actions/core": "^1.10.0",
    "@actions/exec": "^1.1.1",
    "@actions/io": "^1.1.2"
    "@actions/io": "^1.1.2",
    "@octokit/action": "^4.0.10"
  },
  "devDependencies": {
    "@types/jest": "^27.5.2",
@@ -43,9 +44,13 @@
    "eslint-plugin-simple-import-sort": "^7.0.0",
    "jest": "^28.1.3",
    "jest-circus": "^27.5.1",
    "msw": "^0.49.3",
    "nock": "^13.2.9",
    "prettier": "^2.8.0",
    "ts-jest": "^28.0.8",
    "typescript": "^4.9.3"
  },
  "overrides": {
    "@mswjs/interceptors": "^0.17.7"
  }
}
@@ -9,6 +9,11 @@ The save action saves a cache. It works similarly to the `cache` action except t
* `key` - An explicit key for a cache entry. See [creating a cache key](../README.md#creating-a-cache-key).
* `path` - A list of files, directories, and wildcard patterns to cache. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns.
* `upload-chunk-size` - The chunk size used to split up large files during upload, in bytes
* `refresh-cache` - An optional boolean; when enabled, a matched key will be deleted after being restored, allowing it to be reused with refreshed/updated content. Default: `false`

#### Environment Variables

* `GITHUB_TOKEN` - A GitHub API token, required for authenticating to the API when the `refresh-cache` option is enabled.

### Outputs
@@ -15,6 +15,10 @@ inputs:
    description: 'An optional boolean when enabled, allows windows runners to save caches that can be restored on other platforms'
    default: 'false'
    required: false
  refresh-cache:
    description: 'An optional boolean, when enabled it will result in a matched key being deleted after being restored, allowing it to be reused with refreshed/updated content. Default: false'
    required: false
    default: 'false'
runs:
  using: 'node16'
  main: '../dist/save-only/index.js'
@@ -5,7 +5,8 @@ export enum Inputs {
    UploadChunkSize = "upload-chunk-size", // Input for cache, save action
    EnableCrossOsArchive = "enableCrossOsArchive", // Input for cache, restore, save action
    FailOnCacheMiss = "fail-on-cache-miss", // Input for cache, restore action
    LookupOnly = "lookup-only" // Input for cache, restore action
    LookupOnly = "lookup-only", // Input for cache, restore action
    RefreshCache = "refresh-cache" // Input for cache, save action
}

export enum Outputs {
@@ -75,7 +75,6 @@ async function restoreImpl(
        } else {
            core.info(`Cache restored from key: ${cacheKey}`);
        }

        return cacheKey;
    } catch (error: unknown) {
        core.setFailed((error as Error).message);
@@ -37,15 +37,54 @@ async function saveImpl(stateProvider: IStateProvider): Promise<number | void> {
        return;
    }

    // If matched restore key is same as primary key, then do not save cache
    // NO-OP in case of SaveOnly action
    const restoredKey = stateProvider.getCacheState();
    const refreshCache: boolean = utils.getInputAsBool(
        Inputs.RefreshCache,
        { required: false }
    );

    if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
        core.info(
            `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
    // If matched restore key is same as primary key, either try to refresh the cache, or just notify and do not save.

    let restoredKey = stateProvider.getCacheState();

    if (refreshCache && !restoredKey) {
        // If getCacheState didn't give us a key, we're likely using granular actions. Do a lookup to see if we need to refresh or just do a regular save.
        const cachePaths = utils.getInputAsArray(Inputs.Path, {
            required: true
        });
        const enableCrossOsArchive = utils.getInputAsBool(
            Inputs.EnableCrossOsArchive
        );
        return;
        restoredKey = await cache.restoreCache(
            cachePaths,
            primaryKey,
            [],
            { lookupOnly: true },
            enableCrossOsArchive
        );
    }
    if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
        const { GITHUB_TOKEN, GITHUB_REPOSITORY } = process.env || null;
        if (GITHUB_TOKEN && GITHUB_REPOSITORY && refreshCache === true) {
            core.info(
                `Cache hit occurred on the primary key ${primaryKey}, attempting to refresh the contents of the cache.`
            );
            const [_owner, _repo] = GITHUB_REPOSITORY.split(`/`);
            if (_owner && _repo) {
                await utils.deleteCacheByKey(primaryKey, _owner, _repo);
            }
        } else {
            if (refreshCache === true) {
                utils.logWarning(
                    `Can't refresh cache, either the repository info or a valid token are missing.`
                );
                return;
            } else {
                core.info(
                    `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
                );
                return;
            }
        }
    }

    const cachePaths = utils.getInputAsArray(Inputs.Path, {
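
Because this hunk interleaves removed and added lines, the new exact-match branch is easier to read consolidated. A condensed sketch of how it reads after the change (not the verbatim file; it assumes the file's existing imports of `cache`, `core`, `utils`, and `Inputs`):

```typescript
// Inside saveImpl(), once primaryKey is known:
const refreshCache: boolean = utils.getInputAsBool(Inputs.RefreshCache, {
    required: false
});
let restoredKey = stateProvider.getCacheState();

if (refreshCache && !restoredKey) {
    // Granular save action: no restore state is available, so look the key up instead.
    const cachePaths = utils.getInputAsArray(Inputs.Path, { required: true });
    const enableCrossOsArchive = utils.getInputAsBool(Inputs.EnableCrossOsArchive);
    restoredKey = await cache.restoreCache(
        cachePaths,
        primaryKey,
        [],
        { lookupOnly: true },
        enableCrossOsArchive
    );
}

if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
    const { GITHUB_TOKEN, GITHUB_REPOSITORY } = process.env;
    if (GITHUB_TOKEN && GITHUB_REPOSITORY && refreshCache) {
        // Delete the existing entry so the same key can be saved again below.
        const [owner, repo] = GITHUB_REPOSITORY.split("/");
        await utils.deleteCacheByKey(primaryKey, owner, repo);
    } else if (refreshCache) {
        utils.logWarning(
            `Can't refresh cache, either the repository info or a valid token are missing.`
        );
        return;
    } else {
        core.info(
            `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
        );
        return;
    }
}
// ...falls through to the normal saveCache() path.
```
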
@@ -1,7 +1,10 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { RequestError } from "@octokit/request-error"
import { OctokitResponse } from "@octokit/types"

import { RefKey } from "../constants";
const { Octokit } = require("@octokit/action");

export function isGhes(): boolean {
    const ghUrl = new URL(
@@ -19,6 +22,29 @@ export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
    );
}

export async function deleteCacheByKey(key: string, owner: string, repo: string) {
    const octokit = new Octokit();
    let response;
    try {
        response = await octokit.rest.actions.deleteActionsCacheByKey({
            owner: owner,
            repo: repo,
            key: key
        });
        if (response.status === 200) {
            core.info(`Successfully deleted cache with key: ${response.data.actions_caches[0].key}`);
        }
    } catch (e) {
        if (e instanceof RequestError) {
            let err = e as RequestError;
            let errData = err.response?.data as any | undefined;
            exports.logWarning(`${err.name} '${err.status}: ${errData?.message}' trying to delete cache with key: ${key}`);
        }
        response = e;
    }
    return response;
}

export function logWarning(message: string): void {
    const warningPrefix = "[warning]";
    core.info(`${warningPrefix}${message}`);
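
For reference, a usage sketch of the new helper that mirrors the unit tests earlier in this diff (the import path and the key/owner/repo literals are illustrative):

```typescript
import * as core from "@actions/core";
import { RequestError } from "@octokit/request-error";

import { deleteCacheByKey } from "./src/utils/actionUtils";

// GITHUB_TOKEN (and GITHUB_ACTION) must be set in the environment, since
// @octokit/action reads the token from there.
async function example(): Promise<void> {
    const result = await deleteCacheByKey(
        "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
        "owner",
        "repo"
    );
    if (result instanceof RequestError) {
        // e.g. 404 when no entry matches the key, 401 when the token is rejected.
        core.info(`Delete failed with status ${result.status}`);
    }
}
```
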
@@ -1,4 +1,12 @@
import { Inputs } from "../constants";
import { rest } from "msw";
import { setupServer } from "msw/node";
import nock from "nock";

export const successCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
export const failureCacheKey = "Windows-node-bb828da54c148048dd17899ba9fda624811cfb43";
export const passThroughCacheKey = "macOS-node-bb828da54c148048dd17899ba9fda624811cfb43";

// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67
function getInputName(name: string): string {
@@ -16,6 +24,7 @@ interface CacheInput {
    enableCrossOsArchive?: boolean;
    failOnCacheMiss?: boolean;
    lookupOnly?: boolean;
    refreshCache?: boolean;
}

export function setInputs(input: CacheInput): void {
@@ -32,6 +41,8 @@ export function setInputs(input: CacheInput): void {
        setInput(Inputs.FailOnCacheMiss, input.failOnCacheMiss.toString());
    input.lookupOnly !== undefined &&
        setInput(Inputs.LookupOnly, input.lookupOnly.toString());
    input.refreshCache !== undefined &&
        setInput(Inputs.RefreshCache, input.refreshCache.toString());
}

export function clearInputs(): void {
@@ -42,4 +53,34 @@ export function clearInputs(): void {
    delete process.env[getInputName(Inputs.EnableCrossOsArchive)];
    delete process.env[getInputName(Inputs.FailOnCacheMiss)];
    delete process.env[getInputName(Inputs.LookupOnly)];
    delete process.env[getInputName(Inputs.RefreshCache)];
}

/* istanbul ignore next */
export const mockServer = setupServer(rest.delete('https://api.github.com/repos/owner/repo/actions/caches', (req, res, ctx) => {
    if (req.url?.searchParams?.get('key') === failureCacheKey) {
        return res(ctx.status(404),
            ctx.json({
                message: "Not Found",
                documentation_url: "https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key"
            }));
    }
    else if (req.url?.searchParams?.get('key') === successCacheKey) {
        return res(ctx.status(200),
            ctx.json({
                total_count: 1,
                actions_caches: [{
                    id: 15,
                    ref: "refs/heads/main",
                    key: successCacheKey,
                    version: "93a0f912fdb70083e929c1bf564bca2050be1c4e0932f7f9e78465ddcfbcc8f6",
                    last_accessed_at: "2022-12-29T22:06:42.683333300Z",
                    created_at: "2022-12-29T22:06:42.683333300Z",
                    size_in_bytes: 6057793
                }]
            }));
    }
    else if (req.url?.searchParams?.get('key') === passThroughCacheKey) {
        return req.passthrough();
    }
}));
90 src/utils/testUtils.ts.orig (new normal file)
@@ -0,0 +1,90 @@
import { Inputs } from "../constants";
import { rest } from "msw";
import { setupServer } from "msw/node";
import nock from "nock";

export const successCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
export const failureCacheKey = "Windows-node-bb828da54c148048dd17899ba9fda624811cfb43";
export const passThroughCacheKey = "macOS-node-bb828da54c148048dd17899ba9fda624811cfb43";

// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67
function getInputName(name: string): string {
    return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`;
}

export function setInput(name: string, value: string): void {
    process.env[getInputName(name)] = value;
}

interface CacheInput {
    path: string;
    key: string;
    restoreKeys?: string[];
    enableCrossOsArchive?: boolean;
    failOnCacheMiss?: boolean;
    lookupOnly?: boolean;
    refreshCache?: boolean;
}

export function setInputs(input: CacheInput): void {
    setInput(Inputs.Path, input.path);
    setInput(Inputs.Key, input.key);
    input.restoreKeys &&
        setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
    input.enableCrossOsArchive !== undefined &&
        setInput(
            Inputs.EnableCrossOsArchive,
            input.enableCrossOsArchive.toString()
        );
    input.failOnCacheMiss !== undefined &&
        setInput(Inputs.FailOnCacheMiss, input.failOnCacheMiss.toString());
    input.lookupOnly !== undefined &&
        setInput(Inputs.LookupOnly, input.lookupOnly.toString());
<<<<<<< HEAD
    input.refreshCache !== undefined &&
        setInput(Inputs.RefreshCache, input.refreshCache.toString());
=======
    input.refreshCache && setInput(Inputs.RefreshCache, input.refreshCache.toString());
>>>>>>> 0111818 (Allow refreshing cache also with granular save.)
}

export function clearInputs(): void {
    delete process.env[getInputName(Inputs.Path)];
    delete process.env[getInputName(Inputs.Key)];
    delete process.env[getInputName(Inputs.RestoreKeys)];
    delete process.env[getInputName(Inputs.UploadChunkSize)];
    delete process.env[getInputName(Inputs.EnableCrossOsArchive)];
    delete process.env[getInputName(Inputs.FailOnCacheMiss)];
    delete process.env[getInputName(Inputs.LookupOnly)];
    delete process.env[getInputName(Inputs.RefreshCache)];
}

/* istanbul ignore next */
export const mockServer = setupServer(rest.delete('https://api.github.com/repos/owner/repo/actions/caches', (req, res, ctx) => {
    if (req.url?.searchParams?.get('key') === failureCacheKey) {
        return res(ctx.status(404),
            ctx.json({
                message: "Not Found",
                documentation_url: "https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key"
            }));
    }
    else if (req.url?.searchParams?.get('key') === successCacheKey) {
        return res(ctx.status(200),
            ctx.json({
                total_count: 1,
                actions_caches: [{
                    id: 15,
                    ref: "refs/heads/main",
                    key: successCacheKey,
                    version: "93a0f912fdb70083e929c1bf564bca2050be1c4e0932f7f9e78465ddcfbcc8f6",
                    last_accessed_at: "2022-12-29T22:06:42.683333300Z",
                    created_at: "2022-12-29T22:06:42.683333300Z",
                    size_in_bytes: 6057793
                }]
            }));
    }
    else if (req.url?.searchParams?.get('key') === passThroughCacheKey) {
        return req.passthrough();
    }
}));
@@ -21,7 +21,8 @@ A cache today is immutable and cannot be updated. But some use cases require the
```

Please note that this will create a new cache on every run and hence will consume the cache [quota](./README.md#cache-limits).

* As a way around this limitation, the `refresh-cache` option exists: after matching a key and restoring a cache, the action makes a request directly to the GitHub API and deletes that cache entry, leaving the key free to be updated with refreshed content (see the sketch below).
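
The underlying call is the REST endpoint for deleting repository caches by key; a minimal sketch mirroring `deleteCacheByKey` from this diff (the owner, repo, and key values are illustrative, and `@octokit/action` reads `GITHUB_TOKEN` from the environment):

```typescript
import { Octokit } from "@octokit/action";

// DELETE /repos/{owner}/{repo}/actions/caches?key={key}
async function refreshEntry(owner: string, repo: string, key: string): Promise<void> {
    const octokit = new Octokit();
    await octokit.rest.actions.deleteActionsCacheByKey({ owner, repo, key });
}
```
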

## Use cache across feature branches

Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches.