Compare commits
No commits in common. "main" and "pdotl-patch-1" have entirely different histories.
main ... pdotl-patch-1
@@ -14,3 +14,9 @@ jobs:
       - name: add_assignees
         run: |
           curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
+
+      - uses: actions/add-to-project@v0.4.0
+        name: Add to Project Board
+        with:
+          project-url: https://github.com/orgs/actions/projects/12
+          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}

@@ -18,3 +18,9 @@ jobs:
       - name: Add Assignee
         run: |
           curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
+
+      - uses: actions/add-to-project@v0.4.0
+        name: Add to Project Board
+        with:
+          project-url: https://github.com/orgs/actions/projects/12
+          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}

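For context, the curl commands in both workflow hunks call the REST endpoint POST /repos/{owner}/{repo}/issues/{issue_number}/assignees. A minimal TypeScript sketch of the same call via the @actions/github toolkit (an equivalent restatement for illustration, not code from this diff):

    import * as github from "@actions/github";

    // Sketch: assign the current on-call handle to the triggering issue or PR,
    // mirroring the curl calls in the workflow hunks above.
    async function addAssignee(token: string, assignee: string): Promise<void> {
        const octokit = github.getOctokit(token);
        await octokit.rest.issues.addAssignees({
            ...github.context.repo,
            issue_number: github.context.issue.number, // issue or PR number
            assignees: [assignee]
        });
    }
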
Binary file not shown.
Binary file not shown.
@@ -107,7 +107,3 @@
 ### 3.3.1

 - Reduced segment size to 128MB and segment timeout to 10 minutes to fail fast in case the cache download is stuck.
-
-### 3.3.2
-
-- Fixes bug with Azure SDK causing blob downloads to get stuck.
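The 3.3.1 entry above describes a fail-fast pattern: each download segment races a timer, and a stuck segment loses the race instead of hanging the job. A minimal sketch of that pattern, assuming only standard Node timers (the toolkit's own helper is named promiseWithTimeout in the bundles later in this diff):

    // Sketch: race a segment download against a timeout so a stuck download
    // fails fast instead of hanging. 600000 ms corresponds to the 10-minute
    // segment timeout mentioned in the 3.3.1 entry.
    async function withTimeout<T>(timeoutMs: number, work: Promise<T>): Promise<T | "timeout"> {
        let timer: ReturnType<typeof setTimeout> | undefined;
        const timeout = new Promise<"timeout">(resolve => {
            timer = setTimeout(() => resolve("timeout"), timeoutMs);
        });
        try {
            return await Promise.race([work, timeout]);
        } finally {
            if (timer !== undefined) clearTimeout(timer); // don't keep the event loop alive
        }
    }
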
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, RefKey } from "../src/constants";
-import { restoreRun } from "../src/restoreImpl";
+import run from "../src/restore";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

@@ -71,7 +71,7 @@ test("restore with no cache found", async () => {
             return Promise.resolve(undefined);
         });

-    await restoreRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
             return Promise.resolve(undefined);
         });

-    await restoreRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -156,7 +156,7 @@ test("restore with cache found for key", async () => {
             return Promise.resolve(key);
         });

-    await restoreRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -201,7 +201,7 @@ test("restore with cache found for restore key", async () => {
             return Promise.resolve(restoreKey);
         });

-    await restoreRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -246,7 +246,7 @@ test("Fail restore when fail on cache miss is enabled and primary + restore keys
             return Promise.resolve(undefined);
         });

-    await restoreRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -289,7 +289,7 @@ test("restore when fail on cache miss is enabled and primary key doesn't match r
             return Promise.resolve(restoreKey);
         });

-    await restoreRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -335,7 +335,7 @@ test("restore with fail on cache miss disabled and no cache found", async () =>
             return Promise.resolve(undefined);
         });

-    await restoreRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(

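The test hunks above all track one rename on the main side: the entry point moved from a default export in src/restore.ts to a named restoreRun export in src/restoreImpl.ts. Judging from the compiled bundles later in this diff, main's src/restoreImpl.ts is shaped roughly like this (a sketch inferred from the dist output; the IStateProvider name and the elided body are assumptions):

    import { IStateProvider, NullStateProvider, StateProvider } from "./stateProvider";

    export async function restoreImpl(stateProvider: IStateProvider): Promise<string | undefined> {
        // ... input validation and the cache.restoreCache call, elided here ...
        return undefined;
    }

    async function run(stateProvider: IStateProvider, earlyExit?: boolean): Promise<void> {
        try {
            await restoreImpl(stateProvider);
        } catch (err) {
            console.error(err);
            if (earlyExit) process.exit(1);
        }
        // Exit explicitly: dangling HTTP retries could otherwise keep node alive.
        if (earlyExit) process.exit(0);
    }

    export async function restoreOnlyRun(earlyExit?: boolean): Promise<void> {
        await run(new NullStateProvider(), earlyExit);
    }

    export async function restoreRun(earlyExit?: boolean): Promise<void> {
        await run(new StateProvider(), earlyExit);
    }
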
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, Inputs, RefKey } from "../src/constants";
-import { restoreImpl } from "../src/restoreImpl";
+import run from "../src/restoreImpl";
 import { StateProvider } from "../src/stateProvider";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
@@ -60,7 +60,7 @@ test("restore with invalid event outputs warning", async () => {
     const invalidEvent = "commit_comment";
     process.env[Events.Key] = invalidEvent;
     delete process.env[RefKey];
-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());
     expect(logWarningMock).toHaveBeenCalledWith(
         `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
     );
@@ -76,7 +76,7 @@ test("restore without AC available should no-op", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -92,7 +92,7 @@ test("restore on GHES without AC available should no-op", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -119,7 +119,7 @@ test("restore on GHES with AC available ", async () => {
             return Promise.resolve(key);
         });

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -143,7 +143,7 @@ test("restore on GHES with AC available ", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     // this input isn't necessary for restore b/c tarball contains entries relative to workspace
     expect(failedMock).not.toHaveBeenCalledWith(
@@ -155,7 +155,7 @@ test("restore with no key", async () => {
     testUtils.setInput(Inputs.Path, "node_modules");
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledWith(
         "Input required and not supplied: key"
@@ -174,7 +174,7 @@ test("restore with too many keys should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -200,7 +200,7 @@ test("restore with large key should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -226,7 +226,7 @@ test("restore with invalid key should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -260,7 +260,7 @@ test("restore with no cache found", async () => {
             return Promise.resolve(undefined);
         });

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -301,7 +301,7 @@ test("restore with restore keys and no cache found", async () => {
             return Promise.resolve(undefined);
         });

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -341,7 +341,7 @@ test("restore with cache found for key", async () => {
             return Promise.resolve(key);
         });

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -383,7 +383,7 @@ test("restore with cache found for restore key", async () => {
             return Promise.resolve(restoreKey);
         });

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -424,7 +424,7 @@ test("restore with lookup-only set", async () => {
             return Promise.resolve(key);
         });

-    await restoreImpl(new StateProvider());
+    await run(new StateProvider());

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(

@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, RefKey } from "../src/constants";
-import { restoreOnlyRun } from "../src/restoreImpl";
+import run from "../src/restoreOnly";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

@@ -72,7 +72,7 @@ test("restore with no cache found", async () => {
             return Promise.resolve(undefined);
         });

-    await restoreOnlyRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
             return Promise.resolve(undefined);
         });

-    await restoreOnlyRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -153,7 +153,7 @@ test("restore with cache found for key", async () => {
             return Promise.resolve(key);
         });

-    await restoreOnlyRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -196,7 +196,7 @@ test("restore with cache found for restore key", async () => {
             return Promise.resolve(restoreKey);
         });

-    await restoreOnlyRun();
+    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(

@@ -1127,20 +1127,17 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var _a, e_1, _b, _c;
-    var _d;
+    var e_1, _a;
+    var _b;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
+        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
-                _c = _g.value;
-                _e = false;
-                try {
-                    const file = _c;
+            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+                const file = _d.value;
                 const relativeFile = path
                     .relative(workspace, file)
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1154,15 +1151,11 @@ function resolvePaths(patterns) {
                 paths.push(`${relativeFile}`);
             }
         }
-                finally {
-                    _e = true;
-                }
-            }
-        }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
+                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
             }
             finally { if (e_1) throw e_1.error; }
         }
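Both sides of the two resolvePaths hunks above are down-leveled output for the same `for await` loop; only TypeScript's async-iteration helper pattern changed between toolkit builds. The source is approximately (a reconstruction; the handling of empty relative paths is elided):

    import * as glob from "@actions/glob";
    import * as path from "path";

    // Approximate source behind both compiled variants of resolvePaths.
    async function resolvePaths(patterns: string[]): Promise<string[]> {
        const paths: string[] = [];
        const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
        const globber = await glob.create(patterns.join("\n"), {
            implicitDescendants: false
        });
        for await (const file of globber.globGenerator()) {
            // Normalize to forward slashes so cache keys match across platforms.
            const relativeFile = path
                .relative(workspace, file)
                .replace(new RegExp(`\\${path.sep}`, "g"), "/");
            paths.push(relativeFile);
        }
        return paths;
    }
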
@@ -3401,7 +3394,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
-    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
+    return crypto
+        .createHash('sha256')
+        .update(components.join('|'))
+        .digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
@@ -3454,23 +3450,15 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
-            if (downloadOptions.useAzureSdk) {
+        if (downloadOptions.useAzureSdk &&
+            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
             // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
             yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
         }
-            else if (downloadOptions.concurrentBlobDownloads) {
-                // Use concurrent implementation with HttpClient to work around blob SDK issue
-                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
-            }
         else {
             // Otherwise, download using the Actions http-client.
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
-        }
-        else {
-            yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
-        }
     });
 }
 exports.downloadCache = downloadCache;
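Restated, the main side of the downloadCache hunk dispatches three ways: Azure SDK (now opt-in), concurrent http-client ranges (the new default for Azure-hosted blobs), or the plain Actions http-client. A simplified TypeScript sketch of that dispatch (the declared helpers stand in for the toolkit's downloadUtils functions):

    // The three helpers exist in the toolkit's downloadUtils module; declared
    // here only to keep the sketch self-contained.
    declare function downloadCacheStorageSDK(loc: string, dest: string, opts: object): Promise<void>;
    declare function downloadCacheHttpClientConcurrent(loc: string, dest: string, opts: object): Promise<void>;
    declare function downloadCacheHttpClient(loc: string, dest: string): Promise<void>;

    // Simplified restatement of main's dispatch in the hunk above.
    async function chooseDownload(
        archiveLocation: string,
        archivePath: string,
        downloadOptions: { useAzureSdk?: boolean; concurrentBlobDownloads?: boolean }
    ): Promise<void> {
        const archiveUrl = new URL(archiveLocation);
        if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) {
            if (downloadOptions.useAzureSdk) {
                await downloadCacheStorageSDK(archiveLocation, archivePath, downloadOptions);
            } else if (downloadOptions.concurrentBlobDownloads) {
                // Works around the Azure blob SDK issue noted in the hunk's comment.
                await downloadCacheHttpClientConcurrent(archiveLocation, archivePath, downloadOptions);
            } else {
                await downloadCacheHttpClient(archiveLocation, archivePath);
            }
        } else {
            await downloadCacheHttpClient(archiveLocation, archivePath);
        }
    }
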
@@ -3501,7 +3489,9 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end -
+            start +
+            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
@@ -4876,14 +4866,8 @@ function getProxyUrl(reqUrl) {
         }
     })();
     if (proxyVar) {
-        try {
             return new URL(proxyVar);
         }
-        catch (_a) {
-            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-                return new URL(`http://${proxyVar}`);
-        }
-    }
     else {
         return undefined;
     }
@@ -4893,10 +4877,6 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
-    const reqHost = reqUrl.hostname;
-    if (isLoopbackAddress(reqHost)) {
-        return true;
-    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -4922,24 +4902,13 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperNoProxyItem === '*' ||
-            upperReqHosts.some(x => x === upperNoProxyItem ||
-                x.endsWith(`.${upperNoProxyItem}`) ||
-                (upperNoProxyItem.startsWith('.') &&
-                    x.endsWith(`${upperNoProxyItem}`)))) {
+        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
-function isLoopbackAddress(host) {
-    const hostLower = host.toLowerCase();
-    return (hostLower === 'localhost' ||
-        hostLower.startsWith('127.') ||
-        hostLower.startsWith('[::1]') ||
-        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
-}
 //# sourceMappingURL=proxy.js.map

 /***/ }),
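For clarity, main's widened no_proxy matching in the hunk above accepts an exact host, any dot-suffix, a leading-dot entry, or a bare "*", while the patch side accepts only exact matches. Restated as a standalone predicate:

    // Restatement of main's no_proxy matching (hosts and entries pre-uppercased,
    // as in the loop above).
    function matchesNoProxy(upperReqHosts: string[], upperNoProxyItem: string): boolean {
        return (
            upperNoProxyItem === "*" ||
            upperReqHosts.some(
                x =>
                    x === upperNoProxyItem ||
                    x.endsWith(`.${upperNoProxyItem}`) ||
                    (upperNoProxyItem.startsWith(".") && x.endsWith(upperNoProxyItem))
            )
        );
    }
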
@@ -5588,7 +5557,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);
@@ -5745,115 +5714,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
-/**
- * Download the cache using the Actions toolkit http-client concurrently
- *
- * @param archiveLocation the URL for the cache
- * @param archivePath the local path where the cache is saved
- */
-function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
-    var _a;
-    return __awaiter(this, void 0, void 0, function* () {
-        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
-        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
-            socketTimeout: options.timeoutInMs,
-            keepAlive: true
-        });
-        try {
-            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
-            const lengthHeader = res.message.headers['content-length'];
-            if (lengthHeader === undefined || lengthHeader === null) {
-                throw new Error('Content-Length not found on blob response');
-            }
-            const length = parseInt(lengthHeader);
-            if (Number.isNaN(length)) {
-                throw new Error(`Could not interpret Content-Length: ${length}`);
-            }
-            const downloads = [];
-            const blockSize = 4 * 1024 * 1024;
-            for (let offset = 0; offset < length; offset += blockSize) {
-                const count = Math.min(blockSize, length - offset);
-                downloads.push({
-                    offset,
-                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
-                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
-                    })
-                });
-            }
-            // reverse to use .pop instead of .shift
-            downloads.reverse();
-            let actives = 0;
-            let bytesDownloaded = 0;
-            const progress = new DownloadProgress(length);
-            progress.startDisplayTimer();
-            const progressFn = progress.onProgress();
-            const activeDownloads = [];
-            let nextDownload;
-            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
-                const segment = yield Promise.race(Object.values(activeDownloads));
-                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
-                actives--;
-                delete activeDownloads[segment.offset];
-                bytesDownloaded += segment.count;
-                progressFn({ loadedBytes: bytesDownloaded });
-            });
-            while ((nextDownload = downloads.pop())) {
-                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
-                actives++;
-                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
-                    yield waitAndWrite();
-                }
-            }
-            while (actives > 0) {
-                yield waitAndWrite();
-            }
-        }
-        finally {
-            httpClient.dispose();
-            yield archiveDescriptor.close();
-        }
-    });
-}
-exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
-function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const retries = 5;
-        let failures = 0;
-        while (true) {
-            try {
-                const timeout = 30000;
-                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
-                if (typeof result === 'string') {
-                    throw new Error('downloadSegmentRetry failed due to timeout');
-                }
-                return result;
-            }
-            catch (err) {
-                if (failures >= retries) {
-                    throw err;
-                }
-                failures++;
-            }
-        }
-    });
-}
-function downloadSegment(httpClient, archiveLocation, offset, count) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
-            return yield httpClient.get(archiveLocation, {
-                Range: `bytes=${offset}-${offset + count - 1}`
-            });
-        }));
-        if (!partRes.readBodyBuffer) {
-            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
-        }
-        return {
-            offset,
-            count,
-            buffer: yield partRes.readBodyBuffer()
-        };
-    });
-}
 /**
  * Download the cache using the Azure Storage SDK. Only call this method if the
  * URL points to an Azure Storage endpoint.
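The removed downloadCacheHttpClientConcurrent above plans the whole blob as fixed 4 MiB ranges, then keeps a bounded pool of Range requests in flight and writes each finished segment at its own file offset. The planning step, isolated as a sketch:

    // Sketch: the segment plan the removed concurrent downloader builds.
    interface Segment {
        offset: number;
        count: number;
    }

    function planSegments(contentLength: number, blockSize = 4 * 1024 * 1024): Segment[] {
        const segments: Segment[] = [];
        for (let offset = 0; offset < contentLength; offset += blockSize) {
            segments.push({ offset, count: Math.min(blockSize, contentLength - offset) });
        }
        return segments;
    }

Each segment then becomes a request carrying the header Range: bytes=offset-(offset+count-1), with at most downloadConcurrency requests (default 10 in the removed code) active at once.
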
@@ -35977,19 +35837,6 @@ class HttpClientResponse {
             }));
         });
     }
-    readBodyBuffer() {
-        return __awaiter(this, void 0, void 0, function* () {
-            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
-                const chunks = [];
-                this.message.on('data', (chunk) => {
-                    chunks.push(chunk);
-                });
-                this.message.on('end', () => {
-                    resolve(Buffer.concat(chunks));
-                });
-            }));
-        });
-    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -37054,9 +36901,28 @@ exports.default = {

 "use strict";

+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-const restoreImpl_1 = __webpack_require__(835);
-(0, restoreImpl_1.restoreOnlyRun)(true);
+const restoreImpl_1 = __importDefault(__webpack_require__(835));
+const stateProvider_1 = __webpack_require__(309);
+function run() {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield (0, restoreImpl_1.default)(new stateProvider_1.NullStateProvider());
+    });
+}
+run();
+exports.default = run;


 /***/ }),
@@ -40444,8 +40310,7 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: false,
-        concurrentBlobDownloads: true,
+        useAzureSdk: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -40455,9 +40320,6 @@ function getDownloadOptions(copy) {
     if (typeof copy.useAzureSdk === 'boolean') {
         result.useAzureSdk = copy.useAzureSdk;
     }
-    if (typeof copy.concurrentBlobDownloads === 'boolean') {
-        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
-    }
     if (typeof copy.downloadConcurrency === 'number') {
         result.downloadConcurrency = copy.downloadConcurrency;
     }
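Taken together, the two getDownloadOptions hunks above show the default flip: main ships useAzureSdk: false plus concurrentBlobDownloads: true, while the patch side predates concurrentBlobDownloads and defaults useAzureSdk: true. Overrides are applied per field, e.g. (a usage sketch; the import path is an assumption about the toolkit layout):

    import { getDownloadOptions } from "@actions/cache/lib/options";

    // Usage sketch: fields in the argument override defaults one by one.
    const opts = getDownloadOptions({ useAzureSdk: true, downloadConcurrency: 4 });
    // On main this yields roughly { useAzureSdk: true, concurrentBlobDownloads: true,
    // downloadConcurrency: 4, timeoutInMs: 30000, segmentTimeoutInMs: 600000, ... }.
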
@@ -49234,11 +49096,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(694);
-const stateProvider_1 = __webpack_require__(309);
 const utils = __importStar(__webpack_require__(360));
 function restoreImpl(stateProvider) {
     return __awaiter(this, void 0, void 0, function* () {
@@ -49289,40 +49149,7 @@ function restoreImpl(stateProvider) {
         }
     });
 }
-exports.restoreImpl = restoreImpl;
-function run(stateProvider, earlyExit) {
-    return __awaiter(this, void 0, void 0, function* () {
-        try {
-            yield restoreImpl(stateProvider);
-        }
-        catch (err) {
-            console.error(err);
-            if (earlyExit) {
-                process.exit(1);
-            }
-        }
-        // node will stay alive if any promises are not resolved,
-        // which is a possibility if HTTP requests are dangling
-        // due to retries or timeouts. We know that if we got here
-        // that all promises that we care about have successfully
-        // resolved, so simply exit with success.
-        if (earlyExit) {
-            process.exit(0);
-        }
-    });
-}
-function restoreOnlyRun(earlyExit) {
-    return __awaiter(this, void 0, void 0, function* () {
-        yield run(new stateProvider_1.NullStateProvider(), earlyExit);
-    });
-}
-exports.restoreOnlyRun = restoreOnlyRun;
-function restoreRun(earlyExit) {
-    return __awaiter(this, void 0, void 0, function* () {
-        yield run(new stateProvider_1.StateProvider(), earlyExit);
-    });
-}
-exports.restoreRun = restoreRun;
+exports.default = restoreImpl;


 /***/ }),

@@ -1127,20 +1127,17 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var _a, e_1, _b, _c;
-    var _d;
+    var e_1, _a;
+    var _b;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
+        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
-                _c = _g.value;
-                _e = false;
-                try {
-                    const file = _c;
+            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+                const file = _d.value;
                 const relativeFile = path
                     .relative(workspace, file)
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1154,15 +1151,11 @@ function resolvePaths(patterns) {
                 paths.push(`${relativeFile}`);
             }
         }
-                finally {
-                    _e = true;
-                }
-            }
-        }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
+                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -3401,7 +3394,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
-    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
+    return crypto
+        .createHash('sha256')
+        .update(components.join('|'))
+        .digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
@@ -3454,23 +3450,15 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
-            if (downloadOptions.useAzureSdk) {
+        if (downloadOptions.useAzureSdk &&
+            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
             // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
             yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
         }
-            else if (downloadOptions.concurrentBlobDownloads) {
-                // Use concurrent implementation with HttpClient to work around blob SDK issue
-                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
-            }
         else {
             // Otherwise, download using the Actions http-client.
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
-        }
-        else {
-            yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
-        }
     });
 }
 exports.downloadCache = downloadCache;
@@ -3501,7 +3489,9 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end -
+            start +
+            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
@@ -4876,14 +4866,8 @@ function getProxyUrl(reqUrl) {
         }
     })();
     if (proxyVar) {
-        try {
             return new URL(proxyVar);
         }
-        catch (_a) {
-            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-                return new URL(`http://${proxyVar}`);
-        }
-    }
     else {
         return undefined;
     }
@@ -4893,10 +4877,6 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
-    const reqHost = reqUrl.hostname;
-    if (isLoopbackAddress(reqHost)) {
-        return true;
-    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -4922,24 +4902,13 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperNoProxyItem === '*' ||
-            upperReqHosts.some(x => x === upperNoProxyItem ||
-                x.endsWith(`.${upperNoProxyItem}`) ||
-                (upperNoProxyItem.startsWith('.') &&
-                    x.endsWith(`${upperNoProxyItem}`)))) {
+        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
-function isLoopbackAddress(host) {
-    const hostLower = host.toLowerCase();
-    return (hostLower === 'localhost' ||
-        hostLower.startsWith('127.') ||
-        hostLower.startsWith('[::1]') ||
-        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
-}
 //# sourceMappingURL=proxy.js.map

 /***/ }),
@@ -5588,7 +5557,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);
@@ -5745,115 +5714,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
-/**
- * Download the cache using the Actions toolkit http-client concurrently
- *
- * @param archiveLocation the URL for the cache
- * @param archivePath the local path where the cache is saved
- */
-function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
-    var _a;
-    return __awaiter(this, void 0, void 0, function* () {
-        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
-        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
-            socketTimeout: options.timeoutInMs,
-            keepAlive: true
-        });
-        try {
-            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
-            const lengthHeader = res.message.headers['content-length'];
-            if (lengthHeader === undefined || lengthHeader === null) {
-                throw new Error('Content-Length not found on blob response');
-            }
-            const length = parseInt(lengthHeader);
-            if (Number.isNaN(length)) {
-                throw new Error(`Could not interpret Content-Length: ${length}`);
-            }
-            const downloads = [];
-            const blockSize = 4 * 1024 * 1024;
-            for (let offset = 0; offset < length; offset += blockSize) {
-                const count = Math.min(blockSize, length - offset);
-                downloads.push({
-                    offset,
-                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
-                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
-                    })
-                });
-            }
-            // reverse to use .pop instead of .shift
-            downloads.reverse();
-            let actives = 0;
-            let bytesDownloaded = 0;
-            const progress = new DownloadProgress(length);
-            progress.startDisplayTimer();
-            const progressFn = progress.onProgress();
-            const activeDownloads = [];
-            let nextDownload;
-            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
-                const segment = yield Promise.race(Object.values(activeDownloads));
-                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
-                actives--;
-                delete activeDownloads[segment.offset];
-                bytesDownloaded += segment.count;
-                progressFn({ loadedBytes: bytesDownloaded });
-            });
-            while ((nextDownload = downloads.pop())) {
-                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
-                actives++;
-                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
-                    yield waitAndWrite();
-                }
-            }
-            while (actives > 0) {
-                yield waitAndWrite();
-            }
-        }
-        finally {
-            httpClient.dispose();
-            yield archiveDescriptor.close();
-        }
-    });
-}
-exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
-function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const retries = 5;
-        let failures = 0;
-        while (true) {
-            try {
-                const timeout = 30000;
-                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
-                if (typeof result === 'string') {
-                    throw new Error('downloadSegmentRetry failed due to timeout');
-                }
-                return result;
-            }
-            catch (err) {
-                if (failures >= retries) {
-                    throw err;
-                }
-                failures++;
-            }
-        }
-    });
-}
-function downloadSegment(httpClient, archiveLocation, offset, count) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
-            return yield httpClient.get(archiveLocation, {
-                Range: `bytes=${offset}-${offset + count - 1}`
-            });
-        }));
-        if (!partRes.readBodyBuffer) {
-            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
-        }
-        return {
-            offset,
-            count,
-            buffer: yield partRes.readBodyBuffer()
-        };
-    });
-}
 /**
  * Download the cache using the Azure Storage SDK. Only call this method if the
  * URL points to an Azure Storage endpoint.
@@ -35885,19 +35745,6 @@ class HttpClientResponse {
             }));
         });
     }
-    readBodyBuffer() {
-        return __awaiter(this, void 0, void 0, function* () {
-            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
-                const chunks = [];
-                this.message.on('data', (chunk) => {
-                    chunks.push(chunk);
-                });
-                this.message.on('end', () => {
-                    resolve(Buffer.concat(chunks));
-                });
-            }));
-        });
-    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -40434,8 +40281,7 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: false,
-        concurrentBlobDownloads: true,
+        useAzureSdk: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -40445,9 +40291,6 @@ function getDownloadOptions(copy) {
     if (typeof copy.useAzureSdk === 'boolean') {
         result.useAzureSdk = copy.useAzureSdk;
     }
-    if (typeof copy.concurrentBlobDownloads === 'boolean') {
-        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
-    }
     if (typeof copy.downloadConcurrency === 'number') {
         result.downloadConcurrency = copy.downloadConcurrency;
     }
@@ -47610,9 +47453,28 @@ module.exports = function(dst, src) {

 "use strict";

+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-const restoreImpl_1 = __webpack_require__(835);
-(0, restoreImpl_1.restoreRun)(true);
+const restoreImpl_1 = __importDefault(__webpack_require__(835));
+const stateProvider_1 = __webpack_require__(309);
+function run() {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield (0, restoreImpl_1.default)(new stateProvider_1.StateProvider());
+    });
+}
+run();
+exports.default = run;


 /***/ }),
@ -49234,11 +49096,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
|
|
||||||
const cache = __importStar(__webpack_require__(692));
|
const cache = __importStar(__webpack_require__(692));
|
||||||
const core = __importStar(__webpack_require__(470));
|
const core = __importStar(__webpack_require__(470));
|
||||||
const constants_1 = __webpack_require__(694);
|
const constants_1 = __webpack_require__(694);
|
||||||
const stateProvider_1 = __webpack_require__(309);
|
|
||||||
const utils = __importStar(__webpack_require__(443));
|
const utils = __importStar(__webpack_require__(443));
|
||||||
function restoreImpl(stateProvider) {
|
function restoreImpl(stateProvider) {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
@ -49289,40 +49149,7 @@ function restoreImpl(stateProvider) {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.restoreImpl = restoreImpl;
|
exports.default = restoreImpl;
|
||||||
function run(stateProvider, earlyExit) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
try {
|
|
||||||
yield restoreImpl(stateProvider);
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
console.error(err);
|
|
||||||
if (earlyExit) {
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// node will stay alive if any promises are not resolved,
|
|
||||||
// which is a possibility if HTTP requests are dangling
|
|
||||||
// due to retries or timeouts. We know that if we got here
|
|
||||||
// that all promises that we care about have successfully
|
|
||||||
// resolved, so simply exit with success.
|
|
||||||
if (earlyExit) {
|
|
||||||
process.exit(0);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
function restoreOnlyRun(earlyExit) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
yield run(new stateProvider_1.NullStateProvider(), earlyExit);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.restoreOnlyRun = restoreOnlyRun;
|
|
||||||
function restoreRun(earlyExit) {
|
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
|
||||||
yield run(new stateProvider_1.StateProvider(), earlyExit);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.restoreRun = restoreRun;
|
|
||||||
|
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||

@@ -1183,20 +1183,17 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-var _a, e_1, _b, _c;
-var _d;
+var e_1, _a;
+var _b;
 return __awaiter(this, void 0, void 0, function* () {
 const paths = [];
-const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
+const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
 const globber = yield glob.create(patterns.join('\n'), {
 implicitDescendants: false
 });
 try {
-for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
-_c = _g.value;
-_e = false;
-try {
-const file = _c;
+for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+const file = _d.value;
 const relativeFile = path
 .relative(workspace, file)
 .replace(new RegExp(`\\${path.sep}`, 'g'), '/');

@@ -1210,15 +1207,11 @@ function resolvePaths(patterns) {
 paths.push(`${relativeFile}`);
 }
 }
-finally {
-_e = true;
-}
-}
 }
 catch (e_1_1) { e_1 = { error: e_1_1 }; }
 finally {
 try {
-if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
+if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
 }
 finally { if (e_1) throw e_1.error; }
 }

@@ -3457,7 +3450,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
 }
 // Add salt to cache version to support breaking changes in cache entry
 components.push(versionSalt);
-return crypto.createHash('sha256').update(components.join('|')).digest('hex');
+return crypto
+.createHash('sha256')
+.update(components.join('|'))
+.digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {

@@ -3510,23 +3506,15 @@ function downloadCache(archiveLocation, archivePath, options) {
 return __awaiter(this, void 0, void 0, function* () {
 const archiveUrl = new url_1.URL(archiveLocation);
 const downloadOptions = (0, options_1.getDownloadOptions)(options);
-if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
-if (downloadOptions.useAzureSdk) {
+if (downloadOptions.useAzureSdk &&
+archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
 // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
 yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
 }
-else if (downloadOptions.concurrentBlobDownloads) {
-// Use concurrent implementation with HttpClient to work around blob SDK issue
-yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
-}
 else {
 // Otherwise, download using the Actions http-client.
 yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
 }
-}
-else {
-yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
-}
 });
 }
 exports.downloadCache = downloadCache;

@@ -3557,7 +3545,9 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 return __awaiter(this, void 0, void 0, function* () {
-core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+core.debug(`Uploading chunk of size ${end -
+start +
+1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
 const additionalHeaders = {
 'Content-Type': 'application/octet-stream',
 'Content-Range': getContentRange(start, end)

@@ -4932,14 +4922,8 @@ function getProxyUrl(reqUrl) {
 }
 })();
 if (proxyVar) {
-try {
 return new URL(proxyVar);
 }
-catch (_a) {
-if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-return new URL(`http://${proxyVar}`);
-}
-}
 else {
 return undefined;
 }

@@ -4949,10 +4933,6 @@ function checkBypass(reqUrl) {
 if (!reqUrl.hostname) {
 return false;
 }
-const reqHost = reqUrl.hostname;
-if (isLoopbackAddress(reqHost)) {
-return true;
-}
 const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
 if (!noProxy) {
 return false;

@@ -4978,24 +4958,13 @@ function checkBypass(reqUrl) {
 .split(',')
 .map(x => x.trim().toUpperCase())
 .filter(x => x)) {
-if (upperNoProxyItem === '*' ||
-upperReqHosts.some(x => x === upperNoProxyItem ||
-x.endsWith(`.${upperNoProxyItem}`) ||
-(upperNoProxyItem.startsWith('.') &&
-x.endsWith(`${upperNoProxyItem}`)))) {
+if (upperReqHosts.some(x => x === upperNoProxyItem)) {
 return true;
 }
 }
 return false;
 }
 exports.checkBypass = checkBypass;
-function isLoopbackAddress(host) {
-const hostLower = host.toLowerCase();
-return (hostLower === 'localhost' ||
-hostLower.startsWith('127.') ||
-hostLower.startsWith('[::1]') ||
-hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
-}
 //# sourceMappingURL=proxy.js.map
 
 /***/ }),

@@ -5644,7 +5613,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);

@@ -5801,115 +5770,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
 });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
-/**
-* Download the cache using the Actions toolkit http-client concurrently
-*
-* @param archiveLocation the URL for the cache
-* @param archivePath the local path where the cache is saved
-*/
-function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
-var _a;
-return __awaiter(this, void 0, void 0, function* () {
-const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
-const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
-socketTimeout: options.timeoutInMs,
-keepAlive: true
-});
-try {
-const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
-const lengthHeader = res.message.headers['content-length'];
-if (lengthHeader === undefined || lengthHeader === null) {
-throw new Error('Content-Length not found on blob response');
-}
-const length = parseInt(lengthHeader);
-if (Number.isNaN(length)) {
-throw new Error(`Could not interpret Content-Length: ${length}`);
-}
-const downloads = [];
-const blockSize = 4 * 1024 * 1024;
-for (let offset = 0; offset < length; offset += blockSize) {
-const count = Math.min(blockSize, length - offset);
-downloads.push({
-offset,
-promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
-return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
-})
-});
-}
-// reverse to use .pop instead of .shift
-downloads.reverse();
-let actives = 0;
-let bytesDownloaded = 0;
-const progress = new DownloadProgress(length);
-progress.startDisplayTimer();
-const progressFn = progress.onProgress();
-const activeDownloads = [];
-let nextDownload;
-const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
-const segment = yield Promise.race(Object.values(activeDownloads));
-yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
-actives--;
-delete activeDownloads[segment.offset];
-bytesDownloaded += segment.count;
-progressFn({ loadedBytes: bytesDownloaded });
-});
-while ((nextDownload = downloads.pop())) {
-activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
-actives++;
-if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
-yield waitAndWrite();
-}
-}
-while (actives > 0) {
-yield waitAndWrite();
-}
-}
-finally {
-httpClient.dispose();
-yield archiveDescriptor.close();
-}
-});
-}
-exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
-function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
-return __awaiter(this, void 0, void 0, function* () {
-const retries = 5;
-let failures = 0;
-while (true) {
-try {
-const timeout = 30000;
-const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
-if (typeof result === 'string') {
-throw new Error('downloadSegmentRetry failed due to timeout');
-}
-return result;
-}
-catch (err) {
-if (failures >= retries) {
-throw err;
-}
-failures++;
-}
-}
-});
-}
-function downloadSegment(httpClient, archiveLocation, offset, count) {
-return __awaiter(this, void 0, void 0, function* () {
-const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
-return yield httpClient.get(archiveLocation, {
-Range: `bytes=${offset}-${offset + count - 1}`
-});
-}));
-if (!partRes.readBodyBuffer) {
-throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
-}
-return {
-offset,
-count,
-buffer: yield partRes.readBodyBuffer()
-};
-});
-}
 /**
 * Download the cache using the Azure Storage SDK. Only call this method if the
 * URL points to an Azure Storage endpoint.

@@ -35936,19 +35796,6 @@ class HttpClientResponse {
 }));
 });
 }
-readBodyBuffer() {
-return __awaiter(this, void 0, void 0, function* () {
-return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
-const chunks = [];
-this.message.on('data', (chunk) => {
-chunks.push(chunk);
-});
-this.message.on('end', () => {
-resolve(Buffer.concat(chunks));
-});
-}));
-});
-}
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {

@@ -40575,8 +40422,7 @@ exports.getUploadOptions = getUploadOptions;
 */
 function getDownloadOptions(copy) {
 const result = {
-useAzureSdk: false,
-concurrentBlobDownloads: true,
+useAzureSdk: true,
 downloadConcurrency: 8,
 timeoutInMs: 30000,
 segmentTimeoutInMs: 600000,

@@ -40586,9 +40432,6 @@ function getDownloadOptions(copy) {
 if (typeof copy.useAzureSdk === 'boolean') {
 result.useAzureSdk = copy.useAzureSdk;
 }
-if (typeof copy.concurrentBlobDownloads === 'boolean') {
-result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
-}
 if (typeof copy.downloadConcurrency === 'number') {
 result.downloadConcurrency = copy.downloadConcurrency;
 }

@@ -1127,20 +1127,17 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-var _a, e_1, _b, _c;
-var _d;
+var e_1, _a;
+var _b;
 return __awaiter(this, void 0, void 0, function* () {
 const paths = [];
-const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
+const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
 const globber = yield glob.create(patterns.join('\n'), {
 implicitDescendants: false
 });
 try {
-for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
-_c = _g.value;
-_e = false;
-try {
-const file = _c;
+for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
+const file = _d.value;
 const relativeFile = path
 .relative(workspace, file)
 .replace(new RegExp(`\\${path.sep}`, 'g'), '/');

@@ -1154,15 +1151,11 @@ function resolvePaths(patterns) {
 paths.push(`${relativeFile}`);
 }
 }
-finally {
-_e = true;
-}
-}
 }
 catch (e_1_1) { e_1 = { error: e_1_1 }; }
 finally {
 try {
-if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
+if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
 }
 finally { if (e_1) throw e_1.error; }
 }

@@ -3401,7 +3394,10 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
 }
 // Add salt to cache version to support breaking changes in cache entry
 components.push(versionSalt);
-return crypto.createHash('sha256').update(components.join('|')).digest('hex');
+return crypto
+.createHash('sha256')
+.update(components.join('|'))
+.digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {

@@ -3454,23 +3450,15 @@ function downloadCache(archiveLocation, archivePath, options) {
 return __awaiter(this, void 0, void 0, function* () {
 const archiveUrl = new url_1.URL(archiveLocation);
 const downloadOptions = (0, options_1.getDownloadOptions)(options);
-if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
-if (downloadOptions.useAzureSdk) {
+if (downloadOptions.useAzureSdk &&
+archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
 // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
 yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
 }
-else if (downloadOptions.concurrentBlobDownloads) {
-// Use concurrent implementation with HttpClient to work around blob SDK issue
-yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
-}
 else {
 // Otherwise, download using the Actions http-client.
 yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
 }
-}
-else {
-yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
-}
 });
 }
 exports.downloadCache = downloadCache;

@@ -3501,7 +3489,9 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 return __awaiter(this, void 0, void 0, function* () {
-core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+core.debug(`Uploading chunk of size ${end -
+start +
+1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
 const additionalHeaders = {
 'Content-Type': 'application/octet-stream',
 'Content-Range': getContentRange(start, end)

@@ -4876,14 +4866,8 @@ function getProxyUrl(reqUrl) {
 }
 })();
 if (proxyVar) {
-try {
 return new URL(proxyVar);
 }
-catch (_a) {
-if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
-return new URL(`http://${proxyVar}`);
-}
-}
 else {
 return undefined;
 }

@@ -4893,10 +4877,6 @@ function checkBypass(reqUrl) {
 if (!reqUrl.hostname) {
 return false;
 }
-const reqHost = reqUrl.hostname;
-if (isLoopbackAddress(reqHost)) {
-return true;
-}
 const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
 if (!noProxy) {
 return false;

@@ -4922,24 +4902,13 @@ function checkBypass(reqUrl) {
 .split(',')
 .map(x => x.trim().toUpperCase())
 .filter(x => x)) {
-if (upperNoProxyItem === '*' ||
-upperReqHosts.some(x => x === upperNoProxyItem ||
-x.endsWith(`.${upperNoProxyItem}`) ||
-(upperNoProxyItem.startsWith('.') &&
-x.endsWith(`${upperNoProxyItem}`)))) {
+if (upperReqHosts.some(x => x === upperNoProxyItem)) {
 return true;
 }
 }
 return false;
 }
 exports.checkBypass = checkBypass;
-function isLoopbackAddress(host) {
-const hostLower = host.toLowerCase();
-return (hostLower === 'localhost' ||
-hostLower.startsWith('127.') ||
-hostLower.startsWith('[::1]') ||
-hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
-}
 //# sourceMappingURL=proxy.js.map
 
 /***/ }),

@@ -5588,7 +5557,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
 });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);

@@ -5745,115 +5714,6 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
 });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
-/**
-* Download the cache using the Actions toolkit http-client concurrently
-*
-* @param archiveLocation the URL for the cache
-* @param archivePath the local path where the cache is saved
-*/
-function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
-var _a;
-return __awaiter(this, void 0, void 0, function* () {
-const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
-const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
-socketTimeout: options.timeoutInMs,
-keepAlive: true
-});
-try {
-const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
-const lengthHeader = res.message.headers['content-length'];
-if (lengthHeader === undefined || lengthHeader === null) {
-throw new Error('Content-Length not found on blob response');
-}
-const length = parseInt(lengthHeader);
-if (Number.isNaN(length)) {
-throw new Error(`Could not interpret Content-Length: ${length}`);
-}
-const downloads = [];
-const blockSize = 4 * 1024 * 1024;
-for (let offset = 0; offset < length; offset += blockSize) {
-const count = Math.min(blockSize, length - offset);
-downloads.push({
-offset,
-promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
-return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
-})
-});
-}
-// reverse to use .pop instead of .shift
-downloads.reverse();
-let actives = 0;
-let bytesDownloaded = 0;
-const progress = new DownloadProgress(length);
-progress.startDisplayTimer();
-const progressFn = progress.onProgress();
-const activeDownloads = [];
-let nextDownload;
-const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
-const segment = yield Promise.race(Object.values(activeDownloads));
-yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
-actives--;
-delete activeDownloads[segment.offset];
-bytesDownloaded += segment.count;
-progressFn({ loadedBytes: bytesDownloaded });
-});
-while ((nextDownload = downloads.pop())) {
-activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
-actives++;
-if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
-yield waitAndWrite();
-}
-}
-while (actives > 0) {
-yield waitAndWrite();
-}
-}
-finally {
-httpClient.dispose();
-yield archiveDescriptor.close();
-}
-});
-}
-exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
-function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
-return __awaiter(this, void 0, void 0, function* () {
-const retries = 5;
-let failures = 0;
-while (true) {
-try {
-const timeout = 30000;
-const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
-if (typeof result === 'string') {
-throw new Error('downloadSegmentRetry failed due to timeout');
-}
-return result;
-}
-catch (err) {
-if (failures >= retries) {
-throw err;
-}
-failures++;
-}
-}
-});
-}
-function downloadSegment(httpClient, archiveLocation, offset, count) {
-return __awaiter(this, void 0, void 0, function* () {
-const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
-return yield httpClient.get(archiveLocation, {
-Range: `bytes=${offset}-${offset + count - 1}`
-});
-}));
-if (!partRes.readBodyBuffer) {
-throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
-}
-return {
-offset,
-count,
-buffer: yield partRes.readBodyBuffer()
-};
-});
-}
 /**
 * Download the cache using the Azure Storage SDK. Only call this method if the
 * URL points to an Azure Storage endpoint.

@@ -35880,19 +35740,6 @@ class HttpClientResponse {
 }));
 });
 }
-readBodyBuffer() {
-return __awaiter(this, void 0, void 0, function* () {
-return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
-const chunks = [];
-this.message.on('data', (chunk) => {
-chunks.push(chunk);
-});
-this.message.on('end', () => {
-resolve(Buffer.concat(chunks));
-});
-}));
-});
-}
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {

@@ -40519,8 +40366,7 @@ exports.getUploadOptions = getUploadOptions;
 */
 function getDownloadOptions(copy) {
 const result = {
-useAzureSdk: false,
-concurrentBlobDownloads: true,
+useAzureSdk: true,
 downloadConcurrency: 8,
 timeoutInMs: 30000,
 segmentTimeoutInMs: 600000,

@@ -40530,9 +40376,6 @@ function getDownloadOptions(copy) {
 if (typeof copy.useAzureSdk === 'boolean') {
 result.useAzureSdk = copy.useAzureSdk;
 }
-if (typeof copy.concurrentBlobDownloads === 'boolean') {
-result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
-}
 if (typeof copy.downloadConcurrency === 'number') {
 result.downloadConcurrency = copy.downloadConcurrency;
 }
examples.md (33 changes)

@@ -39,7 +39,6 @@
 - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
 - [Swift - Swift Package Manager](#swift---swift-package-manager)
 - [Swift - Mint](#swift---mint)
-- [* - Bazel](#---bazel)
 
 ## C# - NuGet
 

@@ -658,35 +657,3 @@ steps:
     restore-keys: |
       ${{ runner.os }}-mint-
 ```
-
-## * - Bazel
-
-[`bazelisk`](https://github.com/bazelbuild/bazelisk) does not have be to separately downloaded and installed because it's already included in GitHub's `ubuntu-latest` and `macos-latest` base images.
-
-### Linux
-
-```yaml
-- name: Cache Bazel
-  uses: actions/cache@v3
-  with:
-    path: |
-      ~/.cache/bazel
-    key: ${{ runner.os }}-bazel-${{ hashFiles('.bazelversion', '.bazelrc', 'WORKSPACE', 'WORKSPACE.bazel', 'MODULE.bazel') }}
-    restore-keys: |
-      ${{ runner.os }}-bazel-
-- run: bazelisk test //...
-```
-
-### macOS
-
-```yaml
-- name: Cache Bazel
-  uses: actions/cache@v3
-  with:
-    path: |
-      /private/var/tmp/_bazel_runner/
-    key: ${{ runner.os }}-bazel-${{ hashFiles('.bazelversion', '.bazelrc', 'WORKSPACE', 'WORKSPACE.bazel', 'MODULE.bazel') }}
-    restore-keys: |
-      ${{ runner.os }}-bazel-
-- run: bazelisk test //...
-```

@@ -1,15 +1,15 @@
 {
 "name": "cache",
-"version": "3.3.2",
+"version": "3.3.1",
 "lockfileVersion": 2,
 "requires": true,
 "packages": {
 "": {
 "name": "cache",
-"version": "3.3.2",
+"version": "3.3.1",
 "license": "MIT",
 "dependencies": {
-"@actions/cache": "^3.2.2",
+"@actions/cache": "^3.2.1",
 "@actions/core": "^1.10.0",
 "@actions/exec": "^1.1.1",
 "@actions/io": "^1.1.2"

@@ -36,14 +36,14 @@
 }
 },
 "node_modules/@actions/cache": {
-"version": "3.2.2",
-"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz",
-"integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==",
+"version": "3.2.1",
+"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
+"integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
 "dependencies": {
 "@actions/core": "^1.10.0",
 "@actions/exec": "^1.0.1",
 "@actions/glob": "^0.1.0",
-"@actions/http-client": "^2.1.1",
+"@actions/http-client": "^2.0.1",
 "@actions/io": "^1.0.1",
 "@azure/abort-controller": "^1.1.0",
 "@azure/ms-rest-js": "^2.6.0",

@@ -87,9 +87,9 @@
 }
 },
 "node_modules/@actions/http-client": {
-"version": "2.1.1",
-"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz",
-"integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==",
+"version": "2.0.1",
+"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz",
+"integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==",
 "dependencies": {
 "tunnel": "^0.0.6"
 }

@@ -9707,14 +9707,14 @@
 },
 "dependencies": {
 "@actions/cache": {
-"version": "3.2.2",
-"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz",
-"integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==",
+"version": "3.2.1",
+"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
+"integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
 "requires": {
 "@actions/core": "^1.10.0",
 "@actions/exec": "^1.0.1",
 "@actions/glob": "^0.1.0",
-"@actions/http-client": "^2.1.1",
+"@actions/http-client": "^2.0.1",
 "@actions/io": "^1.0.1",
 "@azure/abort-controller": "^1.1.0",
 "@azure/ms-rest-js": "^2.6.0",

@@ -9757,9 +9757,9 @@
 }
 },
 "@actions/http-client": {
-"version": "2.1.1",
-"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz",
-"integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==",
+"version": "2.0.1",
+"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz",
+"integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==",
 "requires": {
 "tunnel": "^0.0.6"
 }

@@ -1,6 +1,6 @@
 {
 "name": "cache",
-"version": "3.3.2",
+"version": "3.3.1",
 "private": true,
 "description": "Cache dependencies and build outputs",
 "main": "dist/restore/index.js",

@@ -23,7 +23,7 @@
 "author": "GitHub",
 "license": "MIT",
 "dependencies": {
-"@actions/cache": "^3.2.2",
+"@actions/cache": "^3.2.1",
 "@actions/core": "^1.10.0",
 "@actions/exec": "^1.1.1",
 "@actions/io": "^1.1.2"

@@ -1,3 +1,10 @@
-import { restoreRun } from "./restoreImpl";
+import restoreImpl from "./restoreImpl";
+import { StateProvider } from "./stateProvider";
 
-restoreRun(true);
+async function run(): Promise<void> {
+    await restoreImpl(new StateProvider());
+}
+
+run();
+
+export default run;

@@ -2,14 +2,10 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
 import { Events, Inputs, Outputs, State } from "./constants";
-import {
-    IStateProvider,
-    NullStateProvider,
-    StateProvider
-} from "./stateProvider";
+import { IStateProvider } from "./stateProvider";
 import * as utils from "./utils/actionUtils";
 
-export async function restoreImpl(
+async function restoreImpl(
     stateProvider: IStateProvider
 ): Promise<string | undefined> {
     try {

@@ -86,37 +82,4 @@ export async function restoreImpl(
     }
 }
 
-async function run(
-    stateProvider: IStateProvider,
-    earlyExit: boolean | undefined
-): Promise<void> {
-    try {
-        await restoreImpl(stateProvider);
-    } catch (err) {
-        console.error(err);
-        if (earlyExit) {
-            process.exit(1);
-        }
-    }
-
-    // node will stay alive if any promises are not resolved,
-    // which is a possibility if HTTP requests are dangling
-    // due to retries or timeouts. We know that if we got here
-    // that all promises that we care about have successfully
-    // resolved, so simply exit with success.
-    if (earlyExit) {
-        process.exit(0);
-    }
-}
-
-export async function restoreOnlyRun(
-    earlyExit?: boolean | undefined
-): Promise<void> {
-    await run(new NullStateProvider(), earlyExit);
-}
-
-export async function restoreRun(
-    earlyExit?: boolean | undefined
-): Promise<void> {
-    await run(new StateProvider(), earlyExit);
-}
+export default restoreImpl;

@@ -1,3 +1,10 @@
-import { restoreOnlyRun } from "./restoreImpl";
+import restoreImpl from "./restoreImpl";
+import { NullStateProvider } from "./stateProvider";
 
-restoreOnlyRun(true);
+async function run(): Promise<void> {
+    await restoreImpl(new NullStateProvider());
+}
+
+run();
+
+export default run;