Compare commits

22 Commits — `kotewar/up...main`

| Author | SHA1 | Date |
| --- | --- | --- |
| Bethany | 704facf57e | |
| bethanyj28 | 17e2888746 | |
| bethanyj28 | 667d8fdfa2 | |
| Chad Kimes | f7ebb81a3f | |
| Johanan Idicula | 67b839edb6 | |
| Jason Orendorff | 57f0e3f198 | |
| Vipul | 04f198bf0b | |
| Vipul | bd9b49b6c3 | |
| Lovepreet Singh | ea0503788c | |
| Lovepreet Singh | 6a1a45d49b | |
| Vipul | 9c7b3e90bd | |
| Michael Vorburger ⛑️ | 8f2671f18e | |
| Michael Vorburger ⛑️ | 6f1f1e10f3 | |
| Bishal Prasad | 5cb4bb86c0 | |
| Sankalp Kotewar | 84995e0d91 | |
| Bishal Prasad | bf96a3f9d8 | |
| Michael Vorburger ⛑️ | 4b8460cbff | |
| Bishal Prasad | 4e7c82221f | |
| Michael Vorburger | ef11f54eee | |
| David Bernard | 4b381be638 | |
| Bishal Prasad | 7893481812 | |
| Lovepreet Singh | 77eb7eb198 | |

```diff
@@ -14,9 +14,3 @@ jobs:
       - name: add_assignees
         run: |
           curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
-
-      - uses: actions/add-to-project@v0.4.0
-        name: Add to Project Board
-        with:
-          project-url: https://github.com/orgs/actions/projects/12
-          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}
```
```diff
@@ -18,9 +18,3 @@ jobs:
       - name: Add Assignee
         run: |
           curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
-
-      - uses: actions/add-to-project@v0.4.0
-        name: Add to Project Board
-        with:
-          project-url: https://github.com/orgs/actions/projects/12
-          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}
```
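Both workflow hunks drop the add-to-project step and keep the curl call that assigns the current on-call. For reference, the same REST call written as TypeScript — a hedged sketch, not part of this diff; the function name and parameters are illustrative, and it assumes global `fetch` (Node 18+):

```typescript
// Hedged sketch of the workflow's curl step above; endpoint and payload
// mirror the step. All names here are illustrative, not from the diff.
async function addAssignee(
    token: string, // the workflow uses secrets.GITHUB_TOKEN
    repository: string, // "owner/repo", i.e. github.repository
    issueOrPrNumber: number, // PRs use the issues endpoint for assignees too
    assignee: string // the workflow uses steps.oncall.outputs.CURRENT
): Promise<void> {
    const res = await fetch(
        `https://api.github.com/repos/${repository}/issues/${issueOrPrNumber}/assignees`,
        {
            method: "POST",
            headers: {
                Accept: "application/vnd.github+json",
                Authorization: `Bearer ${token}`
            },
            body: JSON.stringify({ assignees: [assignee] })
        }
    );
    if (!res.ok) {
        throw new Error(`Failed to add assignee: ${res.status}`);
    }
}
```

The workflow version gets the same values from the Actions expression context rather than function arguments.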
Binary file not shown.
Binary file not shown.

```diff
@@ -2,11 +2,9 @@
 
 This action allows caching dependencies and build outputs to improve workflow execution time.
 
-Two other actions are available in addition to the primary `cache` action:
-
-* [Restore action](./restore/README.md)
-
-* [Save action](./save/README.md)
+>Two other actions are available in addition to the primary `cache` action:
+>* [Restore action](./restore/README.md)
+>* [Save action](./save/README.md)
 
 [![Tests](https://github.com/actions/cache/actions/workflows/workflow.yml/badge.svg)](https://github.com/actions/cache/actions/workflows/workflow.yml)
```
```diff
@@ -107,3 +107,7 @@
 ### 3.3.1
 
 - Reduced segment size to 128MB and segment timeout to 10 minutes to fail fast in case the cache download is stuck.
+
+### 3.3.2
+
+- Fixes bug with Azure SDK causing blob downloads to get stuck.
```
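The 3.3.1 fail-fast behavior surfaces later in this compare as `downloadSegmentRetry`, which wraps each segment download in `promiseWithTimeout`. That helper's definition is not part of this diff, so the following is only a sketch consistent with how the bundled code uses it (callers treat a string result as the timeout sentinel):

```typescript
// Sketch only: promiseWithTimeout is referenced by downloadSegmentRetry in
// the bundles below, but its body is outside this diff. The observed
// contract is that a timed-out call resolves to a string, which callers
// detect with `typeof result === 'string'`.
function promiseWithTimeout<T>(
    timeoutMs: number,
    promise: Promise<T>
): Promise<T | string> {
    let timer: ReturnType<typeof setTimeout> | undefined;
    const timeout = new Promise<string>(resolve => {
        timer = setTimeout(() => resolve("timeout"), timeoutMs);
    });
    return Promise.race([promise, timeout]).finally(() => clearTimeout(timer));
}
```

In the bundles below, each segment attempt gets a 30-second budget and up to five retries before the whole download fails.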
```diff
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
 import { Events, RefKey } from "../src/constants";
-import run from "../src/restore";
+import { restoreRun } from "../src/restoreImpl";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
 
@@ -71,7 +71,7 @@ test("restore with no cache found", async () => {
         return Promise.resolve(undefined);
     });
 
-    await run();
+    await restoreRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
         return Promise.resolve(undefined);
     });
 
-    await run();
+    await restoreRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -156,7 +156,7 @@ test("restore with cache found for key", async () => {
         return Promise.resolve(key);
     });
 
-    await run();
+    await restoreRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -201,7 +201,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(restoreKey);
     });
 
-    await run();
+    await restoreRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -246,7 +246,7 @@ test("Fail restore when fail on cache miss is enabled and primary + restore keys
         return Promise.resolve(undefined);
     });
 
-    await run();
+    await restoreRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -289,7 +289,7 @@ test("restore when fail on cache miss is enabled and primary key doesn't match r
         return Promise.resolve(restoreKey);
     });
 
-    await run();
+    await restoreRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -335,7 +335,7 @@ test("restore with fail on cache miss disabled and no cache found", async () =>
         return Promise.resolve(undefined);
     });
 
-    await run();
+    await restoreRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
```
```diff
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
 import { Events, Inputs, RefKey } from "../src/constants";
-import run from "../src/restoreImpl";
+import { restoreImpl } from "../src/restoreImpl";
 import { StateProvider } from "../src/stateProvider";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
@@ -60,7 +60,7 @@ test("restore with invalid event outputs warning", async () => {
     const invalidEvent = "commit_comment";
     process.env[Events.Key] = invalidEvent;
     delete process.env[RefKey];
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(logWarningMock).toHaveBeenCalledWith(
         `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
     );
@@ -76,7 +76,7 @@ test("restore without AC available should no-op", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -92,7 +92,7 @@ test("restore on GHES without AC available should no-op", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -119,7 +119,7 @@ test("restore on GHES with AC available ", async () => {
         return Promise.resolve(key);
     });
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -143,7 +143,7 @@ test("restore on GHES with AC available ", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     // this input isn't necessary for restore b/c tarball contains entries relative to workspace
     expect(failedMock).not.toHaveBeenCalledWith(
@@ -155,7 +155,7 @@ test("restore with no key", async () => {
     testUtils.setInput(Inputs.Path, "node_modules");
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledWith(
         "Input required and not supplied: key"
@@ -174,7 +174,7 @@ test("restore with too many keys should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -200,7 +200,7 @@ test("restore with large key should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -226,7 +226,7 @@ test("restore with invalid key should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -260,7 +260,7 @@ test("restore with no cache found", async () => {
         return Promise.resolve(undefined);
     });
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -301,7 +301,7 @@ test("restore with restore keys and no cache found", async () => {
         return Promise.resolve(undefined);
     });
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -341,7 +341,7 @@ test("restore with cache found for key", async () => {
         return Promise.resolve(key);
     });
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -383,7 +383,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(restoreKey);
     });
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -424,7 +424,7 @@ test("restore with lookup-only set", async () => {
         return Promise.resolve(key);
     });
 
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
```
```diff
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
 import { Events, RefKey } from "../src/constants";
-import run from "../src/restoreOnly";
+import { restoreOnlyRun } from "../src/restoreImpl";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
 
@@ -72,7 +72,7 @@ test("restore with no cache found", async () => {
         return Promise.resolve(undefined);
     });
 
-    await run();
+    await restoreOnlyRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
         return Promise.resolve(undefined);
     });
 
-    await run();
+    await restoreOnlyRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -153,7 +153,7 @@ test("restore with cache found for key", async () => {
         return Promise.resolve(key);
     });
 
-    await run();
+    await restoreOnlyRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -196,7 +196,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(restoreKey);
     });
 
-    await run();
+    await restoreOnlyRun();
 
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
```
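The three test files above track a refactor that folds the separate `run` entry points into named exports of `restoreImpl`. Reconstructed as TypeScript from the compiled bundles later in this compare — a sketch, not the literal source file; the `IStateProvider` shape is assumed:

```typescript
// Reconstruction from the compiled output below, not the literal source.
interface IStateProvider {
    setState(key: string, value: string): void;
    getState(key: string): string;
}
declare class StateProvider implements IStateProvider {
    setState(key: string, value: string): void;
    getState(key: string): string;
}
declare class NullStateProvider implements IStateProvider {
    setState(key: string, value: string): void;
    getState(key: string): string;
}

export async function restoreImpl(
    stateProvider: IStateProvider
): Promise<string | undefined> {
    // ... unchanged restore logic: validate inputs, call cache.restoreCache,
    // record state and outputs via stateProvider ...
    return undefined;
}

async function run(stateProvider: IStateProvider, earlyExit?: boolean): Promise<void> {
    try {
        await restoreImpl(stateProvider);
    } catch (err) {
        console.error(err);
        if (earlyExit) process.exit(1);
    }
    // Dangling HTTP retries can keep node alive; exit explicitly on success.
    if (earlyExit) process.exit(0);
}

// dist/restore and dist/restore-only invoke these with earlyExit = true;
// the tests above call them with no argument.
export const restoreRun = (earlyExit?: boolean): Promise<void> =>
    run(new StateProvider(), earlyExit);
export const restoreOnlyRun = (earlyExit?: boolean): Promise<void> =>
    run(new NullStateProvider(), earlyExit);
```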
```diff
@@ -1127,17 +1127,20 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var e_1, _a;
-    var _b;
+    var _a, e_1, _b, _c;
+    var _d;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-                const file = _d.value;
+            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
+                _c = _g.value;
+                _e = false;
+                try {
+                    const file = _c;
                 const relativeFile = path
                     .relative(workspace, file)
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1151,11 +1154,15 @@ function resolvePaths(patterns) {
                 paths.push(`${relativeFile}`);
             }
         }
+                finally {
+                    _e = true;
+                }
+            }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
-    return crypto
-        .createHash('sha256')
-        .update(components.join('|'))
-        .digest('hex');
+    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
@@ -3450,15 +3454,23 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (downloadOptions.useAzureSdk &&
-            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+            if (downloadOptions.useAzureSdk) {
                 // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
                 yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+            }
+            else if (downloadOptions.concurrentBlobDownloads) {
+                // Use concurrent implementation with HttpClient to work around blob SDK issue
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
+            }
+            else {
+                // Otherwise, download using the Actions http-client.
+                yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+            }
         }
         else {
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
     });
 }
 exports.downloadCache = downloadCache;
```
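To summarize the new branching in `downloadCache` above: Azure-hosted caches use the SDK only when `useAzureSdk` is set, otherwise the new concurrent http-client path when `concurrentBlobDownloads` is set, otherwise the plain http-client; non-Azure URLs always use the plain client. A compact TypeScript restatement, as a reading aid rather than the bundled code:

```typescript
// Sketch of the download-strategy selection in the hunk above.
interface DownloadFlags {
    useAzureSdk: boolean;
    concurrentBlobDownloads: boolean;
}

type Strategy = "azure-sdk" | "http-client-concurrent" | "http-client";

function pickDownloadStrategy(archiveLocation: string, opts: DownloadFlags): Strategy {
    const { hostname } = new URL(archiveLocation);
    if (!hostname.endsWith(".blob.core.windows.net")) {
        return "http-client"; // non-Azure hosts never hit the SDK or concurrent paths
    }
    if (opts.useAzureSdk) {
        return "azure-sdk"; // opt-in only, now that useAzureSdk defaults to false
    }
    if (opts.concurrentBlobDownloads) {
        return "http-client-concurrent"; // new default path for Azure-hosted caches
    }
    return "http-client";
}
```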
```diff
@@ -3489,9 +3501,7 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
@@ -4866,8 +4876,14 @@ function getProxyUrl(reqUrl) {
         }
     })();
     if (proxyVar) {
+        try {
             return new URL(proxyVar);
+        }
+        catch (_a) {
+            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+                return new URL(`http://${proxyVar}`);
+        }
     }
     else {
         return undefined;
     }
@@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
+    const reqHost = reqUrl.hostname;
+    if (isLoopbackAddress(reqHost)) {
+        return true;
+    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+        if (upperNoProxyItem === '*' ||
+            upperReqHosts.some(x => x === upperNoProxyItem ||
+                x.endsWith(`.${upperNoProxyItem}`) ||
+                (upperNoProxyItem.startsWith('.') &&
+                    x.endsWith(`${upperNoProxyItem}`)))) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+    const hostLower = host.toLowerCase();
+    return (hostLower === 'localhost' ||
+        hostLower.startsWith('127.') ||
+        hostLower.startsWith('[::1]') ||
+        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map
 
 /***/ }),
```
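The vendored `checkBypass` above gains loopback detection and a wider `NO_PROXY` match: `*` bypasses everything, and both bare and leading-dot entries match subdomains. A self-contained sketch of just the matching rule (simplified — the bundled code also checks `host:port` variants of the request host):

```typescript
// Sketch of the widened NO_PROXY matching shown above (simplified).
function bypassesProxy(host: string, noProxy: string): boolean {
    const upperHost = host.toUpperCase();
    return noProxy
        .split(",")
        .map(x => x.trim().toUpperCase())
        .filter(x => x)
        .some(
            item =>
                item === "*" || // wildcard bypasses everything
                upperHost === item || // exact match
                upperHost.endsWith(`.${item}`) || // subdomain of a bare entry
                (item.startsWith(".") && upperHost.endsWith(item)) // leading-dot entry
        );
}

// With NO_PROXY="example.com,.internal":
//   bypassesProxy("example.com", ...)     -> true  (exact)
//   bypassesProxy("api.example.com", ...) -> true  (suffix)
//   bypassesProxy("svc.internal", ...)    -> true  (leading dot)
//   bypassesProxy("github.com", ...)      -> false
```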
```diff
@@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);
@@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
 /**
  * Download the cache using the Azure Storage SDK. Only call this method if the
  * URL points to an Azure Storage endpoint.
```
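`downloadCacheHttpClientConcurrent` above is a hand-rolled promise pool: segments are queued, at most `downloadConcurrency` requests stay in flight, and when the pool is full, `Promise.race` over the active set picks whichever segment finished and writes it. The generic shape of that pattern, as a stand-alone sketch (names are illustrative, not from the bundle):

```typescript
// Stand-alone sketch of the promise-pool pattern used above.
async function runPool<T>(
    jobs: Array<() => Promise<T>>,
    limit: number,
    onDone: (result: T) => Promise<void>
): Promise<void> {
    const active = new Map<number, Promise<[number, T]>>();
    let nextId = 0;

    // Wait for whichever in-flight job finishes first, then hand off its result.
    const drainOne = async (): Promise<void> => {
        const [id, result] = await Promise.race(active.values());
        active.delete(id);
        await onDone(result);
    };

    for (const job of jobs) {
        const id = nextId++;
        active.set(id, job().then((r): [number, T] => [id, r]));
        if (active.size >= limit) {
            await drainOne();
        }
    }
    while (active.size > 0) {
        await drainOne();
    }
}
```

The bundle indexes active downloads by byte offset rather than a counter, which is what lets it write each finished segment directly to its position in the archive file.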
```diff
@@ -35837,6 +35977,19 @@ class HttpClientResponse {
             }));
         });
     }
+    readBodyBuffer() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                const chunks = [];
+                this.message.on('data', (chunk) => {
+                    chunks.push(chunk);
+                });
+                this.message.on('end', () => {
+                    resolve(Buffer.concat(chunks));
+                });
+            }));
+        });
+    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -36901,28 +37054,9 @@ exports.default = {
 
 "use strict";
 
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-const restoreImpl_1 = __importDefault(__webpack_require__(835));
-const stateProvider_1 = __webpack_require__(309);
-function run() {
-    return __awaiter(this, void 0, void 0, function* () {
-        yield (0, restoreImpl_1.default)(new stateProvider_1.NullStateProvider());
-    });
-}
-run();
-exports.default = run;
+const restoreImpl_1 = __webpack_require__(835);
+(0, restoreImpl_1.restoreOnlyRun)(true);
 
 
 /***/ }),
@@ -40310,7 +40444,8 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: true,
+        useAzureSdk: false,
+        concurrentBlobDownloads: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -40320,6 +40455,9 @@ function getDownloadOptions(copy) {
     if (typeof copy.useAzureSdk === 'boolean') {
        result.useAzureSdk = copy.useAzureSdk;
     }
+    if (typeof copy.concurrentBlobDownloads === 'boolean') {
+        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
+    }
     if (typeof copy.downloadConcurrency === 'number') {
         result.downloadConcurrency = copy.downloadConcurrency;
     }
```
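Given the changed defaults above (`useAzureSdk: false`, `concurrentBlobDownloads: true`), a consumer of `@actions/cache` that wants the old SDK download path back has to opt in through the options argument. A hedged usage sketch — the paths and key are illustrative, while the option fields are the ones defined in this hunk:

```typescript
// Usage sketch: opting back into the Azure SDK path now that it is off by default.
import { restoreCache } from "@actions/cache";

async function main(): Promise<void> {
    const hitKey = await restoreCache(
        ["node_modules"], // paths (illustrative)
        "npm-cache-v1", // primary key (illustrative)
        [], // restore keys
        {
            useAzureSdk: true, // old behavior; default is now false
            segmentTimeoutInMs: 600000 // 10-minute per-segment fail-fast (3.3.1)
        }
    );
    console.log(hitKey ?? "cache miss");
}

void main();
```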
```diff
@@ -49096,9 +49234,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(694);
+const stateProvider_1 = __webpack_require__(309);
 const utils = __importStar(__webpack_require__(360));
 function restoreImpl(stateProvider) {
     return __awaiter(this, void 0, void 0, function* () {
@@ -49149,7 +49289,40 @@ function restoreImpl(stateProvider) {
     }
     });
 }
-exports.default = restoreImpl;
+exports.restoreImpl = restoreImpl;
+function run(stateProvider, earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        try {
+            yield restoreImpl(stateProvider);
+        }
+        catch (err) {
+            console.error(err);
+            if (earlyExit) {
+                process.exit(1);
+            }
+        }
+        // node will stay alive if any promises are not resolved,
+        // which is a possibility if HTTP requests are dangling
+        // due to retries or timeouts. We know that if we got here
+        // that all promises that we care about have successfully
+        // resolved, so simply exit with success.
+        if (earlyExit) {
+            process.exit(0);
+        }
+    });
+}
+function restoreOnlyRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.NullStateProvider(), earlyExit);
+    });
+}
+exports.restoreOnlyRun = restoreOnlyRun;
+function restoreRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.StateProvider(), earlyExit);
+    });
+}
+exports.restoreRun = restoreRun;
 
 
 /***/ }),
```
```diff
@@ -1127,17 +1127,20 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var e_1, _a;
-    var _b;
+    var _a, e_1, _b, _c;
+    var _d;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-                const file = _d.value;
+            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
+                _c = _g.value;
+                _e = false;
+                try {
+                    const file = _c;
                 const relativeFile = path
                     .relative(workspace, file)
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1151,11 +1154,15 @@ function resolvePaths(patterns) {
                 paths.push(`${relativeFile}`);
             }
         }
+                finally {
+                    _e = true;
+                }
+            }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
-    return crypto
-        .createHash('sha256')
-        .update(components.join('|'))
-        .digest('hex');
+    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
@@ -3450,15 +3454,23 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (downloadOptions.useAzureSdk &&
-            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+            if (downloadOptions.useAzureSdk) {
                 // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
                 yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+            }
+            else if (downloadOptions.concurrentBlobDownloads) {
+                // Use concurrent implementation with HttpClient to work around blob SDK issue
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
+            }
+            else {
+                // Otherwise, download using the Actions http-client.
+                yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+            }
         }
         else {
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
     });
 }
 exports.downloadCache = downloadCache;
@@ -3489,9 +3501,7 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
@@ -4866,8 +4876,14 @@ function getProxyUrl(reqUrl) {
         }
     })();
     if (proxyVar) {
+        try {
             return new URL(proxyVar);
+        }
+        catch (_a) {
+            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+                return new URL(`http://${proxyVar}`);
+        }
     }
     else {
         return undefined;
     }
@@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
+    const reqHost = reqUrl.hostname;
+    if (isLoopbackAddress(reqHost)) {
+        return true;
+    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+        if (upperNoProxyItem === '*' ||
+            upperReqHosts.some(x => x === upperNoProxyItem ||
+                x.endsWith(`.${upperNoProxyItem}`) ||
+                (upperNoProxyItem.startsWith('.') &&
+                    x.endsWith(`${upperNoProxyItem}`)))) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+    const hostLower = host.toLowerCase();
+    return (hostLower === 'localhost' ||
+        hostLower.startsWith('127.') ||
+        hostLower.startsWith('[::1]') ||
+        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map
 
 /***/ }),
@@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);
@@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
 /**
  * Download the cache using the Azure Storage SDK. Only call this method if the
  * URL points to an Azure Storage endpoint.
@@ -35745,6 +35885,19 @@ class HttpClientResponse {
             }));
         });
     }
+    readBodyBuffer() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                const chunks = [];
+                this.message.on('data', (chunk) => {
+                    chunks.push(chunk);
+                });
+                this.message.on('end', () => {
+                    resolve(Buffer.concat(chunks));
+                });
+            }));
+        });
+    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -40281,7 +40434,8 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: true,
+        useAzureSdk: false,
+        concurrentBlobDownloads: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -40291,6 +40445,9 @@ function getDownloadOptions(copy) {
     if (typeof copy.useAzureSdk === 'boolean') {
         result.useAzureSdk = copy.useAzureSdk;
     }
+    if (typeof copy.concurrentBlobDownloads === 'boolean') {
+        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
+    }
     if (typeof copy.downloadConcurrency === 'number') {
         result.downloadConcurrency = copy.downloadConcurrency;
     }
@@ -47453,28 +47610,9 @@ module.exports = function(dst, src) {
 
 "use strict";
 
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-const restoreImpl_1 = __importDefault(__webpack_require__(835));
-const stateProvider_1 = __webpack_require__(309);
-function run() {
-    return __awaiter(this, void 0, void 0, function* () {
-        yield (0, restoreImpl_1.default)(new stateProvider_1.StateProvider());
-    });
-}
-run();
-exports.default = run;
+const restoreImpl_1 = __webpack_require__(835);
+(0, restoreImpl_1.restoreRun)(true);
 
 
 /***/ }),
@@ -49096,9 +49234,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(694);
+const stateProvider_1 = __webpack_require__(309);
 const utils = __importStar(__webpack_require__(443));
 function restoreImpl(stateProvider) {
     return __awaiter(this, void 0, void 0, function* () {
@@ -49149,7 +49289,40 @@ function restoreImpl(stateProvider) {
     }
     });
 }
-exports.default = restoreImpl;
+exports.restoreImpl = restoreImpl;
+function run(stateProvider, earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        try {
+            yield restoreImpl(stateProvider);
+        }
+        catch (err) {
+            console.error(err);
+            if (earlyExit) {
+                process.exit(1);
+            }
+        }
+        // node will stay alive if any promises are not resolved,
+        // which is a possibility if HTTP requests are dangling
+        // due to retries or timeouts. We know that if we got here
+        // that all promises that we care about have successfully
+        // resolved, so simply exit with success.
+        if (earlyExit) {
+            process.exit(0);
+        }
+    });
+}
+function restoreOnlyRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.NullStateProvider(), earlyExit);
+    });
+}
+exports.restoreOnlyRun = restoreOnlyRun;
+function restoreRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.StateProvider(), earlyExit);
+    });
+}
+exports.restoreRun = restoreRun;
 
 
 /***/ }),
```
@ -1183,17 +1183,20 @@ function getArchiveFileSizeInBytes(filePath) {
|
|||
}
|
||||
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
|
||||
function resolvePaths(patterns) {
|
||||
var e_1, _a;
|
||||
var _b;
|
||||
var _a, e_1, _b, _c;
|
||||
var _d;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const paths = [];
|
||||
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
|
||||
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
|
||||
const globber = yield glob.create(patterns.join('\n'), {
|
||||
implicitDescendants: false
|
||||
});
|
||||
try {
|
||||
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
|
||||
const file = _d.value;
|
||||
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
|
||||
_c = _g.value;
|
||||
_e = false;
|
||||
try {
|
||||
const file = _c;
|
||||
const relativeFile = path
|
||||
.relative(workspace, file)
|
||||
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
|
||||
|
@ -1207,11 +1210,15 @@ function resolvePaths(patterns) {
|
|||
paths.push(`${relativeFile}`);
|
||||
}
|
||||
}
|
||||
finally {
|
||||
_e = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
|
||||
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
|
@ -3450,10 +3457,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
|
|||
}
|
||||
// Add salt to cache version to support breaking changes in cache entry
|
||||
components.push(versionSalt);
|
||||
return crypto
|
||||
.createHash('sha256')
|
||||
.update(components.join('|'))
|
||||
.digest('hex');
|
||||
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
|
||||
}
|
||||
exports.getCacheVersion = getCacheVersion;
|
||||
function getCacheEntry(keys, paths, options) {
|
||||
|
@ -3506,15 +3510,23 @@ function downloadCache(archiveLocation, archivePath, options) {
|
|||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const archiveUrl = new url_1.URL(archiveLocation);
|
||||
const downloadOptions = (0, options_1.getDownloadOptions)(options);
|
||||
if (downloadOptions.useAzureSdk &&
|
||||
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
|
||||
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
|
||||
if (downloadOptions.useAzureSdk) {
|
||||
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
|
||||
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
|
||||
}
|
||||
else if (downloadOptions.concurrentBlobDownloads) {
|
||||
// Use concurrent implementation with HttpClient to work around blob SDK issue
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
|
||||
}
|
||||
else {
|
||||
// Otherwise, download using the Actions http-client.
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
|
||||
}
|
||||
}
|
||||
else {
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.downloadCache = downloadCache;
|
||||
|
@ -3545,9 +3557,7 @@ function getContentRange(start, end) {
|
|||
}
|
||||
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
core.debug(`Uploading chunk of size ${end -
|
||||
start +
|
||||
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
|
||||
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
|
||||
const additionalHeaders = {
|
||||
'Content-Type': 'application/octet-stream',
|
||||
'Content-Range': getContentRange(start, end)
|
||||
|
@ -4922,8 +4932,14 @@ function getProxyUrl(reqUrl) {
|
|||
}
|
||||
})();
|
||||
if (proxyVar) {
|
||||
try {
|
||||
return new URL(proxyVar);
|
||||
}
|
||||
catch (_a) {
|
||||
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
|
||||
return new URL(`http://${proxyVar}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
}
|
||||
|
@ -4933,6 +4949,10 @@ function checkBypass(reqUrl) {
|
|||
if (!reqUrl.hostname) {
|
||||
return false;
|
||||
}
|
||||
const reqHost = reqUrl.hostname;
|
||||
if (isLoopbackAddress(reqHost)) {
|
||||
return true;
|
||||
}
|
||||
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
|
||||
if (!noProxy) {
|
||||
return false;
|
||||
|
@ -4958,13 +4978,24 @@ function checkBypass(reqUrl) {
|
|||
.split(',')
|
||||
.map(x => x.trim().toUpperCase())
|
||||
.filter(x => x)) {
|
||||
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
|
||||
if (upperNoProxyItem === '*' ||
|
||||
upperReqHosts.some(x => x === upperNoProxyItem ||
|
||||
x.endsWith(`.${upperNoProxyItem}`) ||
|
||||
(upperNoProxyItem.startsWith('.') &&
|
||||
x.endsWith(`${upperNoProxyItem}`)))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
exports.checkBypass = checkBypass;
|
||||
function isLoopbackAddress(host) {
|
||||
const hostLower = host.toLowerCase();
|
||||
return (hostLower === 'localhost' ||
|
||||
hostLower.startsWith('127.') ||
|
||||
hostLower.startsWith('[::1]') ||
|
||||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
|
||||
}
|
||||
//# sourceMappingURL=proxy.js.map
|
||||
|
||||
/***/ }),
|
||||
|
@ -5613,7 +5644,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
|||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
|
||||
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const http_client_1 = __webpack_require__(425);
|
||||
const storage_blob_1 = __webpack_require__(373);
|
||||
|
@ -5770,6 +5801,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
|
|||
});
|
||||
}
|
||||
exports.downloadCacheHttpClient = downloadCacheHttpClient;
|
||||
/**
|
||||
* Download the cache using the Actions toolkit http-client concurrently
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
*/
|
||||
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
|
||||
var _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
|
||||
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
|
||||
socketTimeout: options.timeoutInMs,
|
||||
keepAlive: true
|
||||
});
|
||||
try {
|
||||
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
|
||||
const lengthHeader = res.message.headers['content-length'];
|
||||
if (lengthHeader === undefined || lengthHeader === null) {
|
||||
throw new Error('Content-Length not found on blob response');
|
||||
}
|
||||
const length = parseInt(lengthHeader);
|
||||
if (Number.isNaN(length)) {
|
||||
throw new Error(`Could not interpret Content-Length: ${length}`);
|
||||
}
|
||||
const downloads = [];
|
||||
const blockSize = 4 * 1024 * 1024;
|
||||
for (let offset = 0; offset < length; offset += blockSize) {
|
||||
const count = Math.min(blockSize, length - offset);
|
||||
downloads.push({
|
||||
offset,
|
||||
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
|
||||
})
|
||||
});
|
||||
}
|
||||
// reverse to use .pop instead of .shift
|
||||
downloads.reverse();
|
||||
let actives = 0;
|
||||
let bytesDownloaded = 0;
|
||||
const progress = new DownloadProgress(length);
|
||||
progress.startDisplayTimer();
|
||||
const progressFn = progress.onProgress();
|
||||
const activeDownloads = [];
|
||||
let nextDownload;
|
||||
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
|
||||
const segment = yield Promise.race(Object.values(activeDownloads));
|
||||
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
|
||||
actives--;
|
||||
delete activeDownloads[segment.offset];
|
||||
bytesDownloaded += segment.count;
|
||||
progressFn({ loadedBytes: bytesDownloaded });
|
||||
});
|
||||
while ((nextDownload = downloads.pop())) {
|
||||
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
|
||||
actives++;
|
||||
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
|
||||
yield waitAndWrite();
|
||||
}
|
||||
}
|
||||
while (actives > 0) {
|
||||
yield waitAndWrite();
|
||||
}
|
||||
}
|
||||
finally {
|
||||
httpClient.dispose();
|
||||
yield archiveDescriptor.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
|
||||
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const retries = 5;
|
||||
let failures = 0;
|
||||
while (true) {
|
||||
try {
|
||||
const timeout = 30000;
|
||||
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
|
||||
if (typeof result === 'string') {
|
||||
throw new Error('downloadSegmentRetry failed due to timeout');
|
||||
}
|
||||
return result;
|
||||
}
|
||||
catch (err) {
|
||||
if (failures >= retries) {
|
||||
throw err;
|
||||
}
|
||||
failures++;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function downloadSegment(httpClient, archiveLocation, offset, count) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield httpClient.get(archiveLocation, {
|
||||
Range: `bytes=${offset}-${offset + count - 1}`
|
||||
});
|
||||
}));
|
||||
if (!partRes.readBodyBuffer) {
|
||||
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
|
||||
}
|
||||
return {
|
||||
offset,
|
||||
count,
|
||||
buffer: yield partRes.readBodyBuffer()
|
||||
};
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Download the cache using the Azure Storage SDK. Only call this method if the
|
||||
* URL points to an Azure Storage endpoint.
|
||||
|
@ -35796,6 +35936,19 @@ class HttpClientResponse {
            }));
        });
    }
    readBodyBuffer() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
                const chunks = [];
                this.message.on('data', (chunk) => {
                    chunks.push(chunk);
                });
                this.message.on('end', () => {
                    resolve(Buffer.concat(chunks));
                });
            }));
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
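The `readBodyBuffer` method added here collects the raw response body as a `Buffer` instead of decoding it to a string, which is what the byte-range segment downloads above rely on. A hypothetical caller, assuming the `@actions/http-client` release (2.1.x, bumped later in this diff) that ships the method — the function name and user agent are illustrative:

```typescript
import { HttpClient } from "@actions/http-client";

// Fetch one byte range of a blob without coercing the body to a string.
async function fetchRange(url: string, start: number, end: number): Promise<Buffer> {
    const client = new HttpClient("example-agent");
    try {
        const res = await client.get(url, { Range: `bytes=${start}-${end}` });
        if (!res.readBodyBuffer) {
            // Older http-client versions do not implement readBodyBuffer.
            throw new Error("this @actions/http-client version lacks readBodyBuffer");
        }
        return await res.readBodyBuffer();
    } finally {
        client.dispose();
    }
}
```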
@ -40422,7 +40575,8 @@ exports.getUploadOptions = getUploadOptions;
 */
function getDownloadOptions(copy) {
    const result = {
        useAzureSdk: true,
        useAzureSdk: false,
        concurrentBlobDownloads: true,
        downloadConcurrency: 8,
        timeoutInMs: 30000,
        segmentTimeoutInMs: 600000,
@ -40432,6 +40586,9 @@ function getDownloadOptions(copy) {
    if (typeof copy.useAzureSdk === 'boolean') {
        result.useAzureSdk = copy.useAzureSdk;
    }
    if (typeof copy.concurrentBlobDownloads === 'boolean') {
        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
    }
    if (typeof copy.downloadConcurrency === 'number') {
        result.downloadConcurrency = copy.downloadConcurrency;
    }
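With these defaults, Azure-hosted caches are fetched through the concurrent http-client path unless a consumer opts back into the Azure SDK. A sketch of overriding the defaults via the options argument of the toolkit's `restoreCache` (paths and keys are illustrative):

```typescript
import * as cache from "@actions/cache";

// Opt back into the Azure Storage SDK download path instead of the new
// concurrent http-client default.
async function restoreWithSdk(): Promise<string | undefined> {
    return cache.restoreCache(
        ["node_modules"],      // paths to restore
        "npm-linux-abc123",    // primary key
        ["npm-linux-"],        // restore keys
        {
            useAzureSdk: true, // overrides the new `false` default above
            downloadConcurrency: 8,
            timeoutInMs: 30000
        }
    );
}
```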
@ -1127,17 +1127,20 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
    var e_1, _a;
    var _b;
    var _a, e_1, _b, _c;
    var _d;
    return __awaiter(this, void 0, void 0, function* () {
        const paths = [];
        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
        const globber = yield glob.create(patterns.join('\n'), {
            implicitDescendants: false
        });
        try {
            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
                const file = _d.value;
            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
                _c = _g.value;
                _e = false;
                try {
                    const file = _c;
                    const relativeFile = path
                        .relative(workspace, file)
                        .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@ -1151,11 +1154,15 @@ function resolvePaths(patterns) {
                    paths.push(`${relativeFile}`);
                }
                }
                finally {
                    _e = true;
                }
            }
        }
        catch (e_1_1) { e_1 = { error: e_1_1 }; }
        finally {
            try {
                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
            }
            finally { if (e_1) throw e_1.error; }
        }
@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
    }
    // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);
    return crypto
        .createHash('sha256')
        .update(components.join('|'))
        .digest('hex');
    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
@ -3450,15 +3454,23 @@ function downloadCache(archiveLocation, archivePath, options) {
    return __awaiter(this, void 0, void 0, function* () {
        const archiveUrl = new url_1.URL(archiveLocation);
        const downloadOptions = (0, options_1.getDownloadOptions)(options);
        if (downloadOptions.useAzureSdk &&
            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
            if (downloadOptions.useAzureSdk) {
                // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
                yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
            }
            else if (downloadOptions.concurrentBlobDownloads) {
                // Use concurrent implementation with HttpClient to work around blob SDK issue
                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
            }
            else {
                // Otherwise, download using the Actions http-client.
                yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
            }
        }
        else {
            yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
        }
    });
}
exports.downloadCache = downloadCache;
@ -3489,9 +3501,7 @@ function getContentRange(start, end) {
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
    return __awaiter(this, void 0, void 0, function* () {
        core.debug(`Uploading chunk of size ${end -
            start +
            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
        const additionalHeaders = {
            'Content-Type': 'application/octet-stream',
            'Content-Range': getContentRange(start, end)
@ -4866,8 +4876,14 @@ function getProxyUrl(reqUrl) {
        }
    })();
    if (proxyVar) {
        try {
            return new URL(proxyVar);
        }
        catch (_a) {
            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
                return new URL(`http://${proxyVar}`);
        }
    }
    else {
        return undefined;
    }
@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    const reqHost = reqUrl.hostname;
    if (isLoopbackAddress(reqHost)) {
        return true;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) {
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
        if (upperNoProxyItem === '*' ||
            upperReqHosts.some(x => x === upperNoProxyItem ||
                x.endsWith(`.${upperNoProxyItem}`) ||
                (upperNoProxyItem.startsWith('.') &&
                    x.endsWith(`${upperNoProxyItem}`)))) {
            return true;
        }
    }
    return false;
}
exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
    const hostLower = host.toLowerCase();
    return (hostLower === 'localhost' ||
        hostLower.startsWith('127.') ||
        hostLower.startsWith('[::1]') ||
        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map

/***/ }),
@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(__webpack_require__(470));
const http_client_1 = __webpack_require__(425);
const storage_blob_1 = __webpack_require__(373);
@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
    });
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
 * Download the cache using the Actions toolkit http-client concurrently
 *
 * @param archiveLocation the URL for the cache
 * @param archivePath the local path where the cache is saved
 */
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
    var _a;
    return __awaiter(this, void 0, void 0, function* () {
        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
            socketTimeout: options.timeoutInMs,
            keepAlive: true
        });
        try {
            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
            const lengthHeader = res.message.headers['content-length'];
            if (lengthHeader === undefined || lengthHeader === null) {
                throw new Error('Content-Length not found on blob response');
            }
            const length = parseInt(lengthHeader);
            if (Number.isNaN(length)) {
                throw new Error(`Could not interpret Content-Length: ${length}`);
            }
            const downloads = [];
            const blockSize = 4 * 1024 * 1024;
            for (let offset = 0; offset < length; offset += blockSize) {
                const count = Math.min(blockSize, length - offset);
                downloads.push({
                    offset,
                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
                    })
                });
            }
            // reverse to use .pop instead of .shift
            downloads.reverse();
            let actives = 0;
            let bytesDownloaded = 0;
            const progress = new DownloadProgress(length);
            progress.startDisplayTimer();
            const progressFn = progress.onProgress();
            const activeDownloads = [];
            let nextDownload;
            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
                const segment = yield Promise.race(Object.values(activeDownloads));
                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
                actives--;
                delete activeDownloads[segment.offset];
                bytesDownloaded += segment.count;
                progressFn({ loadedBytes: bytesDownloaded });
            });
            while ((nextDownload = downloads.pop())) {
                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
                actives++;
                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
                    yield waitAndWrite();
                }
            }
            while (actives > 0) {
                yield waitAndWrite();
            }
        }
        finally {
            httpClient.dispose();
            yield archiveDescriptor.close();
        }
    });
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
    return __awaiter(this, void 0, void 0, function* () {
        const retries = 5;
        let failures = 0;
        while (true) {
            try {
                const timeout = 30000;
                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
                if (typeof result === 'string') {
                    throw new Error('downloadSegmentRetry failed due to timeout');
                }
                return result;
            }
            catch (err) {
                if (failures >= retries) {
                    throw err;
                }
                failures++;
            }
        }
    });
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
    return __awaiter(this, void 0, void 0, function* () {
        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
            return yield httpClient.get(archiveLocation, {
                Range: `bytes=${offset}-${offset + count - 1}`
            });
        }));
        if (!partRes.readBodyBuffer) {
            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
        }
        return {
            offset,
            count,
            buffer: yield partRes.readBodyBuffer()
        };
    });
}
/**
 * Download the cache using the Azure Storage SDK. Only call this method if the
 * URL points to an Azure Storage endpoint.
@ -35740,6 +35880,19 @@ class HttpClientResponse {
            }));
        });
    }
    readBodyBuffer() {
        return __awaiter(this, void 0, void 0, function* () {
            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
                const chunks = [];
                this.message.on('data', (chunk) => {
                    chunks.push(chunk);
                });
                this.message.on('end', () => {
                    resolve(Buffer.concat(chunks));
                });
            }));
        });
    }
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
@ -40366,7 +40519,8 @@ exports.getUploadOptions = getUploadOptions;
 */
function getDownloadOptions(copy) {
    const result = {
        useAzureSdk: true,
        useAzureSdk: false,
        concurrentBlobDownloads: true,
        downloadConcurrency: 8,
        timeoutInMs: 30000,
        segmentTimeoutInMs: 600000,
@ -40376,6 +40530,9 @@ function getDownloadOptions(copy) {
    if (typeof copy.useAzureSdk === 'boolean') {
        result.useAzureSdk = copy.useAzureSdk;
    }
    if (typeof copy.concurrentBlobDownloads === 'boolean') {
        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
    }
    if (typeof copy.downloadConcurrency === 'number') {
        result.downloadConcurrency = copy.downloadConcurrency;
    }
examples.md
@ -39,6 +39,7 @@
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](#swift---swift-package-manager)
- [Swift - Mint](#swift---mint)
- [* - Bazel](#---bazel)

## C# - NuGet
@ -657,3 +658,35 @@ steps:
    restore-keys: |
      ${{ runner.os }}-mint-
```

## * - Bazel

[`bazelisk`](https://github.com/bazelbuild/bazelisk) does not have to be separately downloaded and installed because it's already included in GitHub's `ubuntu-latest` and `macos-latest` base images.

### Linux

```yaml
- name: Cache Bazel
  uses: actions/cache@v3
  with:
    path: |
      ~/.cache/bazel
    key: ${{ runner.os }}-bazel-${{ hashFiles('.bazelversion', '.bazelrc', 'WORKSPACE', 'WORKSPACE.bazel', 'MODULE.bazel') }}
    restore-keys: |
      ${{ runner.os }}-bazel-
- run: bazelisk test //...
```

### macOS

```yaml
- name: Cache Bazel
  uses: actions/cache@v3
  with:
    path: |
      /private/var/tmp/_bazel_runner/
    key: ${{ runner.os }}-bazel-${{ hashFiles('.bazelversion', '.bazelrc', 'WORKSPACE', 'WORKSPACE.bazel', 'MODULE.bazel') }}
    restore-keys: |
      ${{ runner.os }}-bazel-
- run: bazelisk test //...
```
@ -1,15 +1,15 @@
{
  "name": "cache",
  "version": "3.3.1",
  "version": "3.3.2",
  "lockfileVersion": 2,
  "requires": true,
  "packages": {
    "": {
      "name": "cache",
      "version": "3.3.1",
      "version": "3.3.2",
      "license": "MIT",
      "dependencies": {
        "@actions/cache": "^3.2.1",
        "@actions/cache": "^3.2.2",
        "@actions/core": "^1.10.0",
        "@actions/exec": "^1.1.1",
        "@actions/io": "^1.1.2"
@ -36,14 +36,14 @@
      }
    },
    "node_modules/@actions/cache": {
      "version": "3.2.1",
      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
      "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
      "version": "3.2.2",
      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz",
      "integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==",
      "dependencies": {
        "@actions/core": "^1.10.0",
        "@actions/exec": "^1.0.1",
        "@actions/glob": "^0.1.0",
        "@actions/http-client": "^2.0.1",
        "@actions/http-client": "^2.1.1",
        "@actions/io": "^1.0.1",
        "@azure/abort-controller": "^1.1.0",
        "@azure/ms-rest-js": "^2.6.0",
|
|||
}
|
||||
},
|
||||
"node_modules/@actions/http-client": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz",
|
||||
"integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==",
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz",
|
||||
"integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==",
|
||||
"dependencies": {
|
||||
"tunnel": "^0.0.6"
|
||||
}
|
||||
|
@ -9707,14 +9707,14 @@
    },
    "dependencies": {
      "@actions/cache": {
        "version": "3.2.1",
        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
        "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
        "version": "3.2.2",
        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz",
        "integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==",
        "requires": {
          "@actions/core": "^1.10.0",
          "@actions/exec": "^1.0.1",
          "@actions/glob": "^0.1.0",
          "@actions/http-client": "^2.0.1",
          "@actions/http-client": "^2.1.1",
          "@actions/io": "^1.0.1",
          "@azure/abort-controller": "^1.1.0",
          "@azure/ms-rest-js": "^2.6.0",
@ -9757,9 +9757,9 @@
        }
      },
      "@actions/http-client": {
        "version": "2.0.1",
        "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz",
        "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==",
        "version": "2.1.1",
        "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.1.tgz",
        "integrity": "sha512-qhrkRMB40bbbLo7gF+0vu+X+UawOvQQqNAA/5Unx774RS8poaOhThDOG6BGmxvAnxhQnDp2BG/ZUm65xZILTpw==",
        "requires": {
          "tunnel": "^0.0.6"
        }
@ -1,6 +1,6 @@
{
  "name": "cache",
  "version": "3.3.1",
  "version": "3.3.2",
  "private": true,
  "description": "Cache dependencies and build outputs",
  "main": "dist/restore/index.js",
@ -23,7 +23,7 @@
  "author": "GitHub",
  "license": "MIT",
  "dependencies": {
    "@actions/cache": "^3.2.1",
    "@actions/cache": "^3.2.2",
    "@actions/core": "^1.10.0",
    "@actions/exec": "^1.1.1",
    "@actions/io": "^1.1.2"
@ -1,10 +1,3 @@
import restoreImpl from "./restoreImpl";
import { StateProvider } from "./stateProvider";
import { restoreRun } from "./restoreImpl";

async function run(): Promise<void> {
    await restoreImpl(new StateProvider());
}

run();

export default run;
restoreRun(true);
@ -2,10 +2,14 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, Outputs, State } from "./constants";
import { IStateProvider } from "./stateProvider";
import {
    IStateProvider,
    NullStateProvider,
    StateProvider
} from "./stateProvider";
import * as utils from "./utils/actionUtils";

async function restoreImpl(
export async function restoreImpl(
    stateProvider: IStateProvider
): Promise<string | undefined> {
    try {
@ -82,4 +86,37 @@ async function restoreImpl(
    }
}

export default restoreImpl;
async function run(
    stateProvider: IStateProvider,
    earlyExit: boolean | undefined
): Promise<void> {
    try {
        await restoreImpl(stateProvider);
    } catch (err) {
        console.error(err);
        if (earlyExit) {
            process.exit(1);
        }
    }

    // node will stay alive if any promises are not resolved,
    // which is a possibility if HTTP requests are dangling
    // due to retries or timeouts. We know that if we got here
    // that all promises that we care about have successfully
    // resolved, so simply exit with success.
    if (earlyExit) {
        process.exit(0);
    }
}

export async function restoreOnlyRun(
    earlyExit?: boolean | undefined
): Promise<void> {
    await run(new NullStateProvider(), earlyExit);
}

export async function restoreRun(
    earlyExit?: boolean | undefined
): Promise<void> {
    await run(new StateProvider(), earlyExit);
}
@ -1,10 +1,3 @@
import restoreImpl from "./restoreImpl";
import { NullStateProvider } from "./stateProvider";
import { restoreOnlyRun } from "./restoreImpl";

async function run(): Promise<void> {
    await restoreImpl(new NullStateProvider());
}

run();

export default run;
restoreOnlyRun(true);
@ -30,10 +30,10 @@ Reusing cache across feature branches is not allowed today to provide cache [iso

From `v3.2.3` the cache is cross-OS compatible when the `enableCrossOsArchive` input is passed as true. This means that a cache created on `ubuntu-latest` or `macos-latest` can be used by `windows-latest` and vice versa, provided the workflow that runs on `windows-latest` has the `enableCrossOsArchive` input set to true. This is useful for caching dependencies that are independent of the runner platform, which reduces cache quota consumption and lets multiple platforms build from the same cache. Things to keep in mind while using this feature (a toolkit-level sketch follows the list):

- Only cache those files which are compatible across OSs.
- Caching symlinks might cause issues while restoration as they work differently on different OSs.
- Only cache files from within your github workspace directory.
- Avoid using directory pointers such as `${{ github.workspace }}` or `~` (home) which eventually evaluate to an absolute path and will not match across OSs.
- Only cache files that are compatible across OSs.
- Caching symlinks might cause issues while restoring them as they behave differently on different OSs.
- Be mindful when caching files from outside your GitHub workspace directory, as the directory is located at different places across OSs.
- Avoid using directory pointers such as `${{ github.workspace }}` or `~` (home) which eventually evaluate to an absolute path that does not match across OSs.
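In a workflow this feature is switched on by passing the action's `enableCrossOsArchive` input as `true`; at the toolkit level the same switch is the trailing boolean parameter of `saveCache` and `restoreCache`. A sketch with illustrative paths and keys:

```typescript
import * as cache from "@actions/cache";

// Save and restore a cross-OS-compatible archive: the final `true` argument
// corresponds to the action's `enableCrossOsArchive` input.
async function saveSharedCache(): Promise<void> {
    await cache.saveCache(["dist"], "shared-deps-abc123", undefined, true);
}

async function restoreSharedCache(): Promise<string | undefined> {
    return cache.restoreCache(["dist"], "shared-deps-abc123", [], undefined, true);
}
```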

## Force deletion of caches overriding default cache eviction policy