Compare commits: v3.3.0 ... phantsure/

1 commit: 776f9ba022
.licenses/npm/@actions/cache.dep.yml (generated, BIN): binary file not shown.
@@ -31,8 +31,6 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac
 * New actions are available for granular control over caches - [restore](restore/action.yml) and [save](save/action.yml).
 * Support cross-os caching as an opt-in feature. See [Cross OS caching](./tips-and-workarounds.md#cross-os-cache) for more info.
 * Added option to fail job on cache miss. See [Exit workflow on cache miss](./restore/README.md#exit-workflow-on-cache-miss) for more info.
-* Fix zstd not being used after zstd version upgrade to 1.5.4 on hosted runners
-* Added option to lookup cache without downloading it.
 
 See the [v2 README.md](https://github.com/actions/cache/blob/v2/README.md) for older updates.
 
@@ -53,7 +51,6 @@ If you are using a `self-hosted` Windows runner, `GNU tar` and `zstd` are requir
 * `restore-keys` - An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key.
 * `enableCrossOsArchive` - An optional boolean when enabled, allows Windows runners to save or restore caches that can be restored or saved respectively on other platforms. Default: `false`
 * `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: `false`
-* `lookup-only` - Skip downloading cache. Only check if cache entry exists. Default: `false`
 
 #### Environment Variables
 
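For orientation, a minimal sketch of a workflow step that uses the inputs documented above, as they exist on the v3.3.0 side of this compare; the path, key, and hashFiles pattern are placeholder values, not taken from this diff:

```yaml
- name: Cache npm dependencies
  uses: actions/cache@v3
  with:
    # Placeholder path and key; adjust for the project being cached
    path: ~/.npm
    key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      ${{ runner.os }}-npm-
    # Opt-in behaviours described in the input list above
    enableCrossOsArchive: false
    fail-on-cache-miss: false
    lookup-only: false
```

All three boolean inputs default to `false`, so they only need to be set when the corresponding behaviour is wanted.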
@@ -73,9 +73,3 @@
 
 ### 3.2.5
 - Added fix to prevent from setting MYSYS environment variable globally.
-
-### 3.2.6
-- Fix zstd not being used after zstd version upgrade to 1.5.4 on hosted runners.
-
-### 3.3.0
-- Added option to lookup cache without downloading it.
@@ -74,15 +74,7 @@ test("restore with no cache found", async () => {
     await run();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(stateMock).toHaveBeenCalledTimes(1);

@@ -121,9 +113,7 @@ test("restore with restore keys and no cache found", async () => {
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 

@@ -159,15 +149,7 @@ test("restore with cache found for key", async () => {
     await run();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", key);

@@ -208,9 +190,7 @@ test("restore with cache found for restore key", async () => {
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 

@@ -253,9 +233,7 @@ test("Fail restore when fail on cache miss is enabled and primary + restore keys
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 

@@ -296,9 +274,7 @@ test("restore when fail on cache miss is enabled and primary key doesn't match r
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 

@@ -342,9 +318,7 @@ test("restore with fail on cache miss disabled and no cache found", async () =>
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 
@@ -122,15 +122,7 @@ test("restore on GHES with AC available ", async () => {
     await run(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);

@@ -180,9 +172,7 @@ test("restore with too many keys should fail", async () => {
         [path],
         key,
         restoreKeys,
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
     expect(failedMock).toHaveBeenCalledWith(

@@ -202,15 +192,7 @@ test("restore with large key should fail", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: ${key} cannot be larger than 512 characters.`
     );

@@ -228,15 +210,7 @@ test("restore with invalid key should fail", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: ${key} cannot contain commas.`
     );

@@ -263,15 +237,7 @@ test("restore with no cache found", async () => {
     await run(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(failedMock).toHaveBeenCalledTimes(0);

@@ -308,9 +274,7 @@ test("restore with restore keys and no cache found", async () => {
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 

@@ -344,15 +308,7 @@ test("restore with cache found for key", async () => {
     await run(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);

@@ -390,9 +346,7 @@ test("restore with cache found for restore key", async () => {
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 

@@ -404,48 +358,3 @@ test("restore with cache found for restore key", async () => {
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
-
-test("restore with lookup-only set", async () => {
-    const path = "node_modules";
-    const key = "node-test";
-    testUtils.setInputs({
-        path: path,
-        key,
-        lookupOnly: true
-    });
-
-    const infoMock = jest.spyOn(core, "info");
-    const failedMock = jest.spyOn(core, "setFailed");
-    const stateMock = jest.spyOn(core, "saveState");
-    const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
-    const restoreCacheMock = jest
-        .spyOn(cache, "restoreCache")
-        .mockImplementationOnce(() => {
-            return Promise.resolve(key);
-        });
-
-    await run(new StateProvider());
-
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: true
-        },
-        false
-    );
-
-    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(stateMock).toHaveBeenCalledWith("CACHE_RESULT", key);
-    expect(stateMock).toHaveBeenCalledTimes(2);
-
-    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "true");
-
-    expect(infoMock).toHaveBeenCalledWith(
-        `Cache found and can be restored from key: ${key}`
-    );
-    expect(failedMock).toHaveBeenCalledTimes(0);
-});
@@ -75,15 +75,7 @@ test("restore with no cache found", async () => {
     await run();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
 
     expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
     expect(outputMock).toHaveBeenCalledTimes(1);

@@ -121,9 +113,7 @@ test("restore with restore keys and no cache found", async () => {
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 

@@ -156,15 +146,7 @@ test("restore with cache found for key", async () => {
     await run();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith(
-        [path],
-        key,
-        [],
-        {
-            lookupOnly: false
-        },
-        false
-    );
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [], {}, false);
 
     expect(outputMock).toHaveBeenCalledWith("cache-primary-key", key);
     expect(outputMock).toHaveBeenCalledWith("cache-hit", "true");

@@ -203,9 +185,7 @@ test("restore with cache found for restore key", async () => {
         [path],
         key,
         [restoreKey],
-        {
-            lookupOnly: false
-        },
+        {},
         false
     );
 
@@ -22,10 +22,6 @@ inputs:
     description: 'Fail the workflow if cache entry is not found'
     default: 'false'
     required: false
-  lookup-only:
-    description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
-    default: 'false'
-    required: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
dist/restore-only/index.js (vendored, 44 changed lines)
@@ -1152,13 +1152,12 @@ function unlinkFile(filePath) {
     });
 }
 exports.unlinkFile = unlinkFile;
-function getVersion(app, additionalArgs = []) {
+function getVersion(app) {
     return __awaiter(this, void 0, void 0, function* () {
+        core.debug(`Checking ${app} --version`);
         let versionOutput = '';
-        additionalArgs.push('--version');
-        core.debug(`Checking ${app} ${additionalArgs.join(' ')}`);
         try {
-            yield exec.exec(`${app}`, additionalArgs, {
+            yield exec.exec(`${app} --version`, [], {
                 ignoreReturnCode: true,
                 silent: true,
                 listeners: {

@@ -1178,15 +1177,20 @@ function getVersion(app, additionalArgs = []) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
     return __awaiter(this, void 0, void 0, function* () {
-        const versionOutput = yield getVersion('zstd', ['--quiet']);
+        const versionOutput = yield getVersion('zstd');
         const version = semver.clean(versionOutput);
-        core.debug(`zstd version: ${version}`);
-        if (versionOutput === '') {
+        if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
+            // zstd is not installed
             return constants_1.CompressionMethod.Gzip;
         }
-        else {
+        else if (!version || semver.lt(version, 'v1.3.2')) {
+            // zstd is installed but using a version earlier than v1.3.2
+            // v1.3.2 is required to use the `--long` options in zstd
             return constants_1.CompressionMethod.ZstdWithoutLong;
         }
+        else {
+            return constants_1.CompressionMethod.Zstd;
+        }
     });
 }
 exports.getCompressionMethod = getCompressionMethod;

@@ -4975,8 +4979,7 @@ var Inputs;
     Inputs["RestoreKeys"] = "restore-keys";
     Inputs["UploadChunkSize"] = "upload-chunk-size";
     Inputs["EnableCrossOsArchive"] = "enableCrossOsArchive";
-    Inputs["FailOnCacheMiss"] = "fail-on-cache-miss";
-    Inputs["LookupOnly"] = "lookup-only"; // Input for cache, restore action
+    Inputs["FailOnCacheMiss"] = "fail-on-cache-miss"; // Input for cache, restore action
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {

@@ -41807,8 +41810,7 @@ function getDownloadOptions(copy) {
         useAzureSdk: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
-        segmentTimeoutInMs: 3600000,
-        lookupOnly: false
+        segmentTimeoutInMs: 3600000
     };
     if (copy) {
         if (typeof copy.useAzureSdk === 'boolean') {

@@ -41823,9 +41825,6 @@ function getDownloadOptions(copy) {
         if (typeof copy.segmentTimeoutInMs === 'number') {
             result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
         }
-        if (typeof copy.lookupOnly === 'boolean') {
-            result.lookupOnly = copy.lookupOnly;
-        }
     }
     const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
     if (segmentDownloadTimeoutMins &&

@@ -41838,7 +41837,6 @@ function getDownloadOptions(copy) {
     core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
     core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
     core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
-    core.debug(`Lookup only: ${result.lookupOnly}`);
     return result;
 }
 exports.getDownloadOptions = getDownloadOptions;

@@ -47287,10 +47285,6 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
                 // Cache not found
                 return undefined;
             }
-            if (options === null || options === void 0 ? void 0 : options.lookupOnly) {
-                core.info('Lookup only - skipping download');
-                return cacheEntry.cacheKey;
-            }
             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
             core.debug(`Archive Path: ${archivePath}`);
             // Download the cache from the cache entry

@@ -50504,8 +50498,7 @@ function restoreImpl(stateProvider) {
         });
         const enableCrossOsArchive = utils.getInputAsBool(constants_1.Inputs.EnableCrossOsArchive);
         const failOnCacheMiss = utils.getInputAsBool(constants_1.Inputs.FailOnCacheMiss);
-        const lookupOnly = utils.getInputAsBool(constants_1.Inputs.LookupOnly);
-        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, { lookupOnly: lookupOnly }, enableCrossOsArchive);
+        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys, {}, enableCrossOsArchive);
         if (!cacheKey) {
             if (failOnCacheMiss) {
                 throw new Error(`Failed to restore cache entry. Exiting as fail-on-cache-miss is set. Input key: ${primaryKey}`);

@@ -50520,12 +50513,7 @@ function restoreImpl(stateProvider) {
         stateProvider.setState(constants_1.State.CacheMatchedKey, cacheKey);
         const isExactKeyMatch = utils.isExactKeyMatch(core.getInput(constants_1.Inputs.Key, { required: true }), cacheKey);
         core.setOutput(constants_1.Outputs.CacheHit, isExactKeyMatch.toString());
-        if (lookupOnly) {
-            core.info(`Cache found and can be restored from key: ${cacheKey}`);
-        }
-        else {
-            core.info(`Cache restored from key: ${cacheKey}`);
-        }
+        core.info(`Cache restored from key: ${cacheKey}`);
         return cacheKey;
     }
     catch (error) {
dist/restore/index.js (vendored, 44 changed lines)
@@ -1152,13 +1152,12 @@ function unlinkFile(filePath) {
@@ -1178,15 +1177,20 @@ function getVersion(app, additionalArgs = []) {
@@ -4975,8 +4979,7 @@ var Inputs;
@@ -41778,8 +41781,7 @@ function getDownloadOptions(copy) {
@@ -41794,9 +41796,6 @@ function getDownloadOptions(copy) {
@@ -41809,7 +41808,6 @@ function getDownloadOptions(copy) {
@@ -47258,10 +47256,6 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch
@@ -50504,8 +50498,7 @@ function restoreImpl(stateProvider) {
@@ -50520,12 +50513,7 @@ function restoreImpl(stateProvider) {

These hunks carry the same vendored changes shown in full for dist/restore-only/index.js above (getVersion, getCompressionMethod, the Inputs enum, getDownloadOptions, restoreCache, and restoreImpl); only the bundle's line numbers differ.
dist/save-only/index.js (vendored, 34 changed lines)
@@ -1208,13 +1208,12 @@ function unlinkFile(filePath) {
@@ -1234,15 +1233,20 @@ function getVersion(app, additionalArgs = []) {
@@ -5031,8 +5035,7 @@ var Inputs;
@@ -41919,8 +41922,7 @@ function getDownloadOptions(copy) {
@@ -41935,9 +41937,6 @@ function getDownloadOptions(copy) {
@@ -41950,7 +41949,6 @@ function getDownloadOptions(copy) {
@@ -47399,10 +47397,6 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch

The same vendored getVersion, getCompressionMethod, Inputs enum, getDownloadOptions, and restoreCache changes as in dist/restore-only/index.js above; this bundle has no restoreImpl hunks.
dist/save/index.js (vendored, 34 changed lines)
@@ -1152,13 +1152,12 @@ function unlinkFile(filePath) {
@@ -1178,15 +1177,20 @@ function getVersion(app, additionalArgs = []) {
@@ -4975,8 +4979,7 @@ var Inputs;
@@ -41863,8 +41866,7 @@ function getDownloadOptions(copy) {
@@ -41879,9 +41881,6 @@ function getDownloadOptions(copy) {
@@ -41894,7 +41893,6 @@ function getDownloadOptions(copy) {
@@ -47372,10 +47370,6 @@ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArch

The same set of vendored changes as in dist/save-only/index.js.
package-lock.json (generated, 18 changed lines)
@@ -1,15 +1,15 @@
 {
   "name": "cache",
-  "version": "3.3.0",
+  "version": "3.2.5",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "cache",
-      "version": "3.3.0",
+      "version": "3.2.5",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "^3.2.0",
+        "@actions/cache": "^3.1.3",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"

@@ -36,9 +36,9 @@
     }
   },
   "node_modules/@actions/cache": {
-    "version": "3.2.0",
-    "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.0.tgz",
-    "integrity": "sha512-bCjN0+gPLaZZbpOoeK/1ve7J5MO+zv8FpcdKOWF3Tb9to0bWDpvgn9D2c/lC22oPUtHnCWQhLNVMfsWF4OBhNw==",
+    "version": "3.1.3",
+    "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.3.tgz",
+    "integrity": "sha512-5YbATJUS6nVs9EkpK7JaliC3G5koKdJT99NLreL0gJlznudzZzXGNIheW5+HUT9C2DBvubOxYIyfX4v2UpZWrA==",
     "dependencies": {
       "@actions/core": "^1.10.0",
       "@actions/exec": "^1.0.1",

@@ -9722,9 +9722,9 @@
     },
     "dependencies": {
       "@actions/cache": {
-        "version": "3.2.0",
-        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.0.tgz",
-        "integrity": "sha512-bCjN0+gPLaZZbpOoeK/1ve7J5MO+zv8FpcdKOWF3Tb9to0bWDpvgn9D2c/lC22oPUtHnCWQhLNVMfsWF4OBhNw==",
+        "version": "3.1.3",
+        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.3.tgz",
+        "integrity": "sha512-5YbATJUS6nVs9EkpK7JaliC3G5koKdJT99NLreL0gJlznudzZzXGNIheW5+HUT9C2DBvubOxYIyfX4v2UpZWrA==",
         "requires": {
           "@actions/core": "^1.10.0",
           "@actions/exec": "^1.0.1",
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "3.3.0",
+  "version": "3.2.5",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",

@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^3.2.0",
+    "@actions/cache": "^3.1.3",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"
@@ -9,8 +9,7 @@ The restore action restores a cache. It works similarly to the `cache` action ex
 * `key` - An explicit key for a cache entry. See [creating a cache key](../README.md#creating-a-cache-key).
 * `path` - A list of files, directories, and wildcard patterns to restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns.
 * `restore-keys` - An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key.
-* `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: `false`
-* `lookup-only` - Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache. Default: `false`
+* `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: false
 
 ### Outputs
 
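A minimal sketch of how `lookup-only` can gate later work when using the restore action, assuming the v3.3.0 behaviour documented above; the job name, path, and key are placeholders, not taken from this diff:

```yaml
jobs:
  check-cache:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Check whether a build cache already exists
        id: cache
        uses: actions/cache/restore@v3
        with:
          # Placeholder path and key for illustration only
          path: dist
          key: build-${{ github.sha }}
          lookup-only: true
      - name: Build only when no cache entry exists
        if: steps.cache.outputs.cache-hit != 'true'
        run: npm ci && npm run build
```

Because `lookup-only: true` skips the download, the `dist` directory is not populated here; the step only reports whether the entry exists via the `cache-hit` output.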
@@ -19,10 +19,6 @@ inputs:
     description: 'Fail the workflow if cache entry is not found'
     default: 'false'
    required: false
-  lookup-only:
-    description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
-    default: 'false'
-    required: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
@@ -4,8 +4,7 @@ export enum Inputs {
     RestoreKeys = "restore-keys", // Input for cache, restore action
     UploadChunkSize = "upload-chunk-size", // Input for cache, save action
     EnableCrossOsArchive = "enableCrossOsArchive", // Input for cache, restore, save action
-    FailOnCacheMiss = "fail-on-cache-miss", // Input for cache, restore action
-    LookupOnly = "lookup-only" // Input for cache, restore action
+    FailOnCacheMiss = "fail-on-cache-miss" // Input for cache, restore action
 }
 
 export enum Outputs {
@@ -35,13 +35,12 @@ async function restoreImpl(
             Inputs.EnableCrossOsArchive
         );
         const failOnCacheMiss = utils.getInputAsBool(Inputs.FailOnCacheMiss);
-        const lookupOnly = utils.getInputAsBool(Inputs.LookupOnly);
 
         const cacheKey = await cache.restoreCache(
             cachePaths,
             primaryKey,
             restoreKeys,
-            { lookupOnly: lookupOnly },
+            {},
             enableCrossOsArchive
         );
 

@@ -70,11 +69,7 @@ async function restoreImpl(
         );
 
         core.setOutput(Outputs.CacheHit, isExactKeyMatch.toString());
-        if (lookupOnly) {
-            core.info(`Cache found and can be restored from key: ${cacheKey}`);
-        } else {
-            core.info(`Cache restored from key: ${cacheKey}`);
-        }
+        core.info(`Cache restored from key: ${cacheKey}`);
 
         return cacheKey;
     } catch (error: unknown) {
@@ -15,7 +15,6 @@ interface CacheInput {
     restoreKeys?: string[];
     enableCrossOsArchive?: boolean;
     failOnCacheMiss?: boolean;
-    lookupOnly?: boolean;
 }
 
 export function setInputs(input: CacheInput): void {

@@ -30,8 +29,6 @@ export function setInputs(input: CacheInput): void {
     );
     input.failOnCacheMiss !== undefined &&
         setInput(Inputs.FailOnCacheMiss, input.failOnCacheMiss.toString());
-    input.lookupOnly !== undefined &&
-        setInput(Inputs.LookupOnly, input.lookupOnly.toString());
 }
 
 export function clearInputs(): void {

@@ -41,5 +38,4 @@ export function clearInputs(): void {
     delete process.env[getInputName(Inputs.UploadChunkSize)];
     delete process.env[getInputName(Inputs.EnableCrossOsArchive)];
     delete process.env[getInputName(Inputs.FailOnCacheMiss)];
-    delete process.env[getInputName(Inputs.LookupOnly)];
 }
@@ -44,11 +44,6 @@ on:
 jobs:
   cleanup:
     runs-on: ubuntu-latest
-    permissions:
-      # `actions:write` permission is required to delete caches
-      #   See also: https://docs.github.com/en/rest/actions/cache?apiVersion=2022-11-28#delete-a-github-actions-cache-for-a-repository-using-a-cache-id
-      actions: write
-      contents: read
     steps:
       - name: Check out code
         uses: actions/checkout@v3

@@ -58,7 +53,7 @@ jobs:
           gh extension install actions/gh-actions-cache
 
           REPO=${{ github.repository }}
-          BRANCH=refs/pull/${{ github.event.pull_request.number }}/merge
+          BRANCH=${{ github.ref }}
 
           echo "Fetching list of cache key"
           cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
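For context, a rough sketch of how the listing step above is typically completed into a full cleanup job; the delete loop and the `--confirm` flag are not part of this diff and are assumed from the gh-actions-cache extension's documented usage:

```yaml
      - name: Cleanup caches for this PR
        run: |
          gh extension install actions/gh-actions-cache

          REPO=${{ github.repository }}
          # Merge ref of the pull request, as on the v3.3.0 side of the hunk above
          BRANCH=refs/pull/${{ github.event.pull_request.number }}/merge

          echo "Fetching list of cache key"
          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )

          # Assumed continuation: delete each listed key without an interactive prompt
          set +e
          for cacheKey in $cacheKeysForPR
          do
            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
          done
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```

The `actions: write` permission removed in the first hunk above is what allows the delete calls to succeed.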