Compare commits

22 Commits

| Author | SHA1 | Date |
|---|---|---|
| | e448a9b857 | |
| | e6bd6b7749 | |
| | 1fd4c858f9 | |
| | a2af908e3a | |
| | 928d1a16d9 | |
| | e4a7ffadfc | |
| | 726a6dcd01 | |
| | 3db166e2ea | |
| | d86048c66c | |
| | 328d69042a | |
| | 27bce4eee7 | |
| | f8b42f7ab4 | |
| | 2106e8cf10 | |
| | db66798ebc | |
| | d359fd0772 | |
| | 350822c32f | |
| | abecf4abf4 | |
| | 604e071d21 | |
| | 4560c23b39 | |
| | 59018c2f85 | |
| | 58740802ef | |
| | f109393e79 | |
3  .gitattributes  vendored

@@ -1 +1,2 @@
 * text=auto eol=lf
+.licenses/** -diff linguist-generated=true
1  .github/CODEOWNERS  vendored  Normal file

@@ -0,0 +1 @@

```
* @actions/artifacts-actions
```
20  .github/workflows/licensed.yml  vendored  Normal file

@@ -0,0 +1,20 @@

```yaml
name: Licensed

on:
  push: {branches: main}
  pull_request: {branches: main}

jobs:
  test:
    runs-on: ubuntu-latest
    name: Check licenses
    steps:
      - uses: actions/checkout@v2
      - run: npm ci
      - name: Install licensed
        run: |
          cd $RUNNER_TEMP
          curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/2.12.2/licensed-2.12.2-linux-x64.tar.gz
          sudo tar -xzf licensed.tar.gz
          sudo mv licensed /usr/local/bin/licensed
      - run: licensed status
```
15  .licensed.yml  Normal file

@@ -0,0 +1,15 @@

```yaml
sources:
  npm: true

allowed:
  - apache-2.0
  - bsd-2-clause
  - bsd-3-clause
  - isc
  - mit
  - cc0-1.0
  - unlicense

reviewed:
  npm:
    - fs.realpath
```
BIN  .licenses/npm/*.dep.yml  generated  Normal file (binary files not shown):

- .licenses/npm/@actions/artifact.dep.yml
- .licenses/npm/@actions/core.dep.yml
- .licenses/npm/@actions/glob.dep.yml
- .licenses/npm/@actions/http-client.dep.yml
- .licenses/npm/@actions/io.dep.yml
- .licenses/npm/@types/tmp.dep.yml
- .licenses/npm/balanced-match.dep.yml
- .licenses/npm/brace-expansion.dep.yml
- .licenses/npm/concat-map.dep.yml
- .licenses/npm/fs.realpath.dep.yml
- .licenses/npm/glob.dep.yml
- .licenses/npm/inflight.dep.yml
- .licenses/npm/inherits.dep.yml
- .licenses/npm/minimatch.dep.yml
- .licenses/npm/once.dep.yml
- .licenses/npm/path-is-absolute.dep.yml
- .licenses/npm/rimraf.dep.yml
- .licenses/npm/tmp-promise.dep.yml
- .licenses/npm/tmp.dep.yml
- .licenses/npm/tunnel.dep.yml
- .licenses/npm/wrappy.dep.yml
@@ -41,6 +41,10 @@ Here are a few things you can do that will increase the likelihood of your pull

 - Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
 - Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

+## Licensed
+
+This repository uses a tool called [Licensed](https://github.com/github/licensed) to verify third party dependencies. You may need to locally install licensed and run `licensed cache` to update the dependency cache if you install or update a production dependency. If licensed cache is unable to determine the dependency, you may need to modify the cache file yourself to put the correct license. You should still verify the dependency, licensed is a tool to help, but is not a substitute for human review of dependencies.
+
 ## Resources

 - [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
26  README.md

@@ -199,14 +199,37 @@ Environment variables along with context expressions can also be used for input.

     path: ${{ github.workspace }}/artifact/**/*
 ```

+### Retention Period
+
+Artifacts are retained for 90 days by default. You can specify a shorter retention period using the `retention-days` input:
+
+```yaml
+  - name: 'Create a file'
+    run: echo "I won't live long" > my_file.txt
+
+  - name: 'Upload Artifact'
+    uses: actions/upload-artifact@v2
+    with:
+      name: my-artifact
+      path: my_file.txt
+      retention-days: 5
+
+```
+
+The retention period must be between 1 and 90 inclusive. For more information see [artifact and log retention policies](https://docs.github.com/en/free-pro-team@latest/actions/reference/usage-limits-billing-and-administration#artifact-and-log-retention-policy).
+
 ## Where does the upload go?
-In the top right corner of a workflow run, once the run is over, if you used this action, there will be a `Artifacts` dropdown which you can download items from. Here's a screenshot of what it looks like<br/>
+In the top right corner of a workflow run, once the run is over, if you used this action, there will be an `Artifacts` dropdown which you can download items from. Here's a screenshot of what it looks like<br/>
 <img src="https://user-images.githubusercontent.com/16109154/72556687-20235a80-386d-11ea-9e2a-b534faa77083.png" width="375" height="140">

 There is a trashcan icon that can be used to delete the artifact. This icon will only appear for users who have write permissions to the repository.

 # Limitations

 ### Zipped Artifact Downloads

 During a workflow run, files are uploaded and downloaded individually using the `upload-artifact` and `download-artifact` actions. However, when a workflow run finishes and an artifact is downloaded from either the UI or through the [download api](https://developer.github.com/v3/actions/artifacts/#download-an-artifact), a zip is dynamically created with all the file contents that were uploaded. There is currently no way to download artifacts after a workflow run finishes in a format other than a zip or to download artifact contents individually. One of the consequences of this limitation is that if a zip is uploaded during a workflow run and then downloaded from the UI, there will be a double zip created.

 ### Permission Loss

 :exclamation: File permissions are not maintained during artifact upload :exclamation: For example, if you make a file executable using `chmod` and then upload that file, post-download the file is no longer guaranteed to be set as an executable.

@@ -230,6 +253,7 @@ If file permissions and case sensitivity are required, you can `tar` all of your

     path: my_files.tar
 ```


 ## Additional Documentation

 See [persisting workflow data using artifacts](https://help.github.com/en/actions/configuring-and-managing-workflows/persisting-workflow-data-using-artifacts) for additional examples and tips.
@@ -17,6 +17,12 @@ inputs:

       error: Fail the action with an error message
       ignore: Do not output any warnings or errors, the action does not fail
     default: 'warn'
+  retention-days:
+    description: >
+      Duration after which artifact will expire in days. 0 means using default retention.
+
+      Minimum 1 day.
+      Maximum 90 days unless changed from the repository settings page.
 runs:
   using: 'node12'
   main: 'dist/index.js'
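The repository-side limit interacts with this input: rather than failing, the bundled `@actions/artifact` client clamps a request that exceeds the maximum exposed through `GITHUB_RETENTION_DAYS` (see `getProperRetention` in the `dist/index.js` hunks below). A minimal TypeScript sketch of that rule, mirroring the bundled code; the standalone function and the example values are illustrative only:

```typescript
// Sketch of the clamping rule from getProperRetention in dist/index.js (this diff).
// The real implementation also logs a warning when the value is reduced.
function getProperRetention(retentionInput: number, retentionSetting?: string): number {
  if (retentionInput < 0) {
    throw new Error('Invalid retention, minimum value is 1.')
  }
  let retention = retentionInput
  if (retentionSetting) {
    const maxRetention = parseInt(retentionSetting)
    if (!isNaN(maxRetention) && maxRetention < retention) {
      // Requests above the repository setting are reduced, not rejected.
      retention = maxRetention
    }
  }
  return retention
}

// Example: a repository capped at 30 days leaves 5 alone but reduces 90.
getProperRetention(5, '30')  // => 5
getProperRetention(90, '30') // => 30
```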
441  dist/index.js  vendored
@ -137,6 +137,32 @@ function onceStrict (fn) {
|
||||
}
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 82:
|
||||
/***/ (function(__unusedmodule, exports) {
|
||||
|
||||
"use strict";
|
||||
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/**
|
||||
* Sanitizes an input into a string so it can be passed into issueCommand safely
|
||||
* @param input input to sanitize into a string
|
||||
*/
|
||||
function toCommandValue(input) {
|
||||
if (input === null || input === undefined) {
|
||||
return '';
|
||||
}
|
||||
else if (typeof input === 'string' || input instanceof String) {
|
||||
return input;
|
||||
}
|
||||
return JSON.stringify(input);
|
||||
}
|
||||
exports.toCommandValue = toCommandValue;
|
||||
//# sourceMappingURL=utils.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 87:
|
||||
@ -1074,6 +1100,42 @@ function regExpEscape (s) {
|
||||
}
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 102:
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
// For internal use, subject to change.
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
const fs = __importStar(__webpack_require__(747));
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const utils_1 = __webpack_require__(82);
|
||||
function issueCommand(command, message) {
|
||||
const filePath = process.env[`GITHUB_${command}`];
|
||||
if (!filePath) {
|
||||
throw new Error(`Unable to find environment variable for file command ${command}`);
|
||||
}
|
||||
if (!fs.existsSync(filePath)) {
|
||||
throw new Error(`Missing file at path: ${filePath}`);
|
||||
}
|
||||
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
exports.issueCommand = issueCommand;
|
||||
//# sourceMappingURL=file-command.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 117:
|
||||
@ -3767,7 +3829,7 @@ class DefaultArtifactClient {
|
||||
}
|
||||
else {
|
||||
// Create an entry for the artifact in the file container
|
||||
const response = yield uploadHttpClient.createArtifactInFileContainer(name);
|
||||
const response = yield uploadHttpClient.createArtifactInFileContainer(name, options);
|
||||
if (!response.fileContainerResourceUrl) {
|
||||
core.debug(response.toString());
|
||||
throw new Error('No URL provided by the Artifact Service to upload an artifact to');
|
||||
@ -4013,12 +4075,16 @@ function run() {
|
||||
}
|
||||
}
|
||||
else {
|
||||
core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file(s) uploaded`);
|
||||
const s = searchResult.filesToUpload.length === 1 ? '' : 's';
|
||||
core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`);
|
||||
core.debug(`Root artifact directory is ${searchResult.rootDirectory}`);
|
||||
const artifactClient = artifact_1.create();
|
||||
const options = {
|
||||
continueOnError: false
|
||||
};
|
||||
if (inputs.retentionDays) {
|
||||
options.retentionDays = inputs.retentionDays;
|
||||
}
|
||||
const uploadResponse = yield artifactClient.uploadArtifact(inputs.artifactName, searchResult.filesToUpload, searchResult.rootDirectory, options);
|
||||
if (uploadResponse.failedItems.length > 0) {
|
||||
core.setFailed(`An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`);
|
||||
@ -4108,6 +4174,10 @@ function getWorkSpaceDirectory() {
|
||||
return workspaceDirectory;
|
||||
}
|
||||
exports.getWorkSpaceDirectory = getWorkSpaceDirectory;
|
||||
function getRetentionDays() {
|
||||
return process.env['GITHUB_RETENTION_DAYS'];
|
||||
}
|
||||
exports.getRetentionDays = getRetentionDays;
|
||||
//# sourceMappingURL=config-variables.js.map
|
||||
|
||||
/***/ }),
|
||||
@ -4910,6 +4980,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const utils_1 = __webpack_require__(82);
|
||||
/**
|
||||
* Commands
|
||||
*
|
||||
@ -4964,13 +5035,13 @@ class Command {
|
||||
}
|
||||
}
|
||||
function escapeData(s) {
|
||||
return (s || '')
|
||||
return utils_1.toCommandValue(s)
|
||||
.replace(/%/g, '%25')
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/\n/g, '%0A');
|
||||
}
|
||||
function escapeProperty(s) {
|
||||
return (s || '')
|
||||
return utils_1.toCommandValue(s)
|
||||
.replace(/%/g, '%25')
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/\n/g, '%0A')
|
||||
@ -5042,6 +5113,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const command_1 = __webpack_require__(431);
|
||||
const file_command_1 = __webpack_require__(102);
|
||||
const utils_1 = __webpack_require__(82);
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const path = __importStar(__webpack_require__(622));
|
||||
/**
|
||||
@ -5064,11 +5137,21 @@ var ExitCode;
|
||||
/**
|
||||
* Sets env variable for this action and future actions in the job
|
||||
* @param name the name of the variable to set
|
||||
* @param val the value of the variable
|
||||
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function exportVariable(name, val) {
|
||||
process.env[name] = val;
|
||||
command_1.issueCommand('set-env', { name }, val);
|
||||
const convertedVal = utils_1.toCommandValue(val);
|
||||
process.env[name] = convertedVal;
|
||||
const filePath = process.env['GITHUB_ENV'] || '';
|
||||
if (filePath) {
|
||||
const delimiter = '_GitHubActionsFileCommandDelimeter_';
|
||||
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
|
||||
file_command_1.issueCommand('ENV', commandValue);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('set-env', { name }, convertedVal);
|
||||
}
|
||||
}
|
||||
exports.exportVariable = exportVariable;
|
||||
/**
|
||||
@ -5084,7 +5167,13 @@ exports.setSecret = setSecret;
|
||||
* @param inputPath
|
||||
*/
|
||||
function addPath(inputPath) {
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
const filePath = process.env['GITHUB_PATH'] || '';
|
||||
if (filePath) {
|
||||
file_command_1.issueCommand('PATH', inputPath);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
}
|
||||
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
||||
}
|
||||
exports.addPath = addPath;
|
||||
@ -5107,12 +5196,22 @@ exports.getInput = getInput;
|
||||
* Sets the value of an output.
|
||||
*
|
||||
* @param name name of the output to set
|
||||
* @param value value to store
|
||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function setOutput(name, value) {
|
||||
command_1.issueCommand('set-output', { name }, value);
|
||||
}
|
||||
exports.setOutput = setOutput;
|
||||
/**
|
||||
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
||||
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
||||
*
|
||||
*/
|
||||
function setCommandEcho(enabled) {
|
||||
command_1.issue('echo', enabled ? 'on' : 'off');
|
||||
}
|
||||
exports.setCommandEcho = setCommandEcho;
|
||||
//-----------------------------------------------------------------------
|
||||
// Results
|
||||
//-----------------------------------------------------------------------
|
||||
@ -5146,18 +5245,18 @@ function debug(message) {
|
||||
exports.debug = debug;
|
||||
/**
|
||||
* Adds an error issue
|
||||
* @param message error issue message
|
||||
* @param message error issue message. Errors will be converted to string via toString()
|
||||
*/
|
||||
function error(message) {
|
||||
command_1.issue('error', message);
|
||||
command_1.issue('error', message instanceof Error ? message.toString() : message);
|
||||
}
|
||||
exports.error = error;
|
||||
/**
|
||||
* Adds an warning issue
|
||||
* @param message warning issue message
|
||||
* @param message warning issue message. Errors will be converted to string via toString()
|
||||
*/
|
||||
function warning(message) {
|
||||
command_1.issue('warning', message);
|
||||
command_1.issue('warning', message instanceof Error ? message.toString() : message);
|
||||
}
|
||||
exports.warning = warning;
|
||||
/**
|
||||
@ -5215,8 +5314,9 @@ exports.group = group;
|
||||
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
||||
*
|
||||
* @param name name of the state to store
|
||||
* @param value value to store
|
||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function saveState(name, value) {
|
||||
command_1.issueCommand('save-state', { name }, value);
|
||||
}
|
||||
@ -5235,6 +5335,88 @@ exports.getState = getState;
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 489:
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const utils_1 = __webpack_require__(870);
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const config_variables_1 = __webpack_require__(401);
|
||||
function retry(name, operation, customErrorMessages, maxAttempts) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let response = undefined;
|
||||
let statusCode = undefined;
|
||||
let isRetryable = false;
|
||||
let errorMessage = '';
|
||||
let customErrorInformation = undefined;
|
||||
let attempt = 1;
|
||||
while (attempt <= maxAttempts) {
|
||||
try {
|
||||
response = yield operation();
|
||||
statusCode = response.message.statusCode;
|
||||
if (utils_1.isSuccessStatusCode(statusCode)) {
|
||||
return response;
|
||||
}
|
||||
// Extra error information that we want to display if a particular response code is hit
|
||||
if (statusCode) {
|
||||
customErrorInformation = customErrorMessages.get(statusCode);
|
||||
}
|
||||
isRetryable = utils_1.isRetryableStatusCode(statusCode);
|
||||
errorMessage = `Artifact service responded with ${statusCode}`;
|
||||
}
|
||||
catch (error) {
|
||||
isRetryable = true;
|
||||
errorMessage = error.message;
|
||||
}
|
||||
if (!isRetryable) {
|
||||
core.info(`${name} - Error is not retryable`);
|
||||
if (response) {
|
||||
utils_1.displayHttpDiagnostics(response);
|
||||
}
|
||||
break;
|
||||
}
|
||||
core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
|
||||
yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt));
|
||||
attempt++;
|
||||
}
|
||||
if (response) {
|
||||
utils_1.displayHttpDiagnostics(response);
|
||||
}
|
||||
if (customErrorInformation) {
|
||||
throw Error(`${name} failed: ${customErrorInformation}`);
|
||||
}
|
||||
throw Error(`${name} failed: ${errorMessage}`);
|
||||
});
|
||||
}
|
||||
exports.retry = retry;
|
||||
function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return yield retry(name, method, customErrorMessages, maxAttempts);
|
||||
});
|
||||
}
|
||||
exports.retryHttpClientRequest = retryHttpClientRequest;
|
||||
//# sourceMappingURL=requestUtils.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 532:
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
@ -5309,7 +5491,6 @@ exports.getDownloadSpecification = getDownloadSpecification;
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const url = __webpack_require__(835);
|
||||
const http = __webpack_require__(605);
|
||||
const https = __webpack_require__(211);
|
||||
const pm = __webpack_require__(950);
|
||||
@ -5358,7 +5539,7 @@ var MediaTypes;
|
||||
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||
*/
|
||||
function getProxyUrl(serverUrl) {
|
||||
let proxyUrl = pm.getProxyUrl(url.parse(serverUrl));
|
||||
let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
|
||||
return proxyUrl ? proxyUrl.href : '';
|
||||
}
|
||||
exports.getProxyUrl = getProxyUrl;
|
||||
@ -5377,6 +5558,15 @@ const HttpResponseRetryCodes = [
|
||||
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
|
||||
const ExponentialBackoffCeiling = 10;
|
||||
const ExponentialBackoffTimeSlice = 5;
|
||||
class HttpClientError extends Error {
|
||||
constructor(message, statusCode) {
|
||||
super(message);
|
||||
this.name = 'HttpClientError';
|
||||
this.statusCode = statusCode;
|
||||
Object.setPrototypeOf(this, HttpClientError.prototype);
|
||||
}
|
||||
}
|
||||
exports.HttpClientError = HttpClientError;
|
||||
class HttpClientResponse {
|
||||
constructor(message) {
|
||||
this.message = message;
|
||||
@ -5395,7 +5585,7 @@ class HttpClientResponse {
|
||||
}
|
||||
exports.HttpClientResponse = HttpClientResponse;
|
||||
function isHttps(requestUrl) {
|
||||
let parsedUrl = url.parse(requestUrl);
|
||||
let parsedUrl = new URL(requestUrl);
|
||||
return parsedUrl.protocol === 'https:';
|
||||
}
|
||||
exports.isHttps = isHttps;
|
||||
@ -5500,7 +5690,7 @@ class HttpClient {
|
||||
if (this._disposed) {
|
||||
throw new Error('Client has already been disposed.');
|
||||
}
|
||||
let parsedUrl = url.parse(requestUrl);
|
||||
let parsedUrl = new URL(requestUrl);
|
||||
let info = this._prepareRequest(verb, parsedUrl, headers);
|
||||
// Only perform retries on reads since writes may not be idempotent.
|
||||
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
|
||||
@ -5539,7 +5729,7 @@ class HttpClient {
|
||||
// if there's no location to redirect to, we won't
|
||||
break;
|
||||
}
|
||||
let parsedRedirectUrl = url.parse(redirectUrl);
|
||||
let parsedRedirectUrl = new URL(redirectUrl);
|
||||
if (parsedUrl.protocol == 'https:' &&
|
||||
parsedUrl.protocol != parsedRedirectUrl.protocol &&
|
||||
!this._allowRedirectDowngrade) {
|
||||
@ -5655,7 +5845,7 @@ class HttpClient {
|
||||
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
|
||||
*/
|
||||
getAgent(serverUrl) {
|
||||
let parsedUrl = url.parse(serverUrl);
|
||||
let parsedUrl = new URL(serverUrl);
|
||||
return this._getAgent(parsedUrl);
|
||||
}
|
||||
_prepareRequest(method, requestUrl, headers) {
|
||||
@ -5728,7 +5918,7 @@ class HttpClient {
|
||||
maxSockets: maxSockets,
|
||||
keepAlive: this._keepAlive,
|
||||
proxy: {
|
||||
proxyAuth: proxyUrl.auth,
|
||||
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`,
|
||||
host: proxyUrl.hostname,
|
||||
port: proxyUrl.port
|
||||
}
|
||||
@ -5823,12 +6013,8 @@ class HttpClient {
|
||||
else {
|
||||
msg = 'Failed request: (' + statusCode + ')';
|
||||
}
|
||||
let err = new Error(msg);
|
||||
// attach statusCode and body obj (if available) to the error object
|
||||
err['statusCode'] = statusCode;
|
||||
if (response.result) {
|
||||
err['result'] = response.result;
|
||||
}
|
||||
let err = new HttpClientError(msg, statusCode);
|
||||
err.result = response.result;
|
||||
reject(err);
|
||||
}
|
||||
else {
|
||||
@ -6390,11 +6576,19 @@ function getInputs() {
|
||||
if (!noFileBehavior) {
|
||||
core.setFailed(`Unrecognized ${constants_1.Inputs.IfNoFilesFound} input. Provided: ${ifNoFilesFound}. Available options: ${Object.keys(constants_1.NoFileOptions)}`);
|
||||
}
|
||||
return {
|
||||
const inputs = {
|
||||
artifactName: name,
|
||||
searchPath: path,
|
||||
ifNoFilesFound: noFileBehavior
|
||||
};
|
||||
const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
|
||||
if (retentionDaysStr) {
|
||||
inputs.retentionDays = parseInt(retentionDaysStr);
|
||||
if (isNaN(inputs.retentionDays)) {
|
||||
core.setFailed('Invalid retention-days');
|
||||
}
|
||||
}
|
||||
return inputs;
|
||||
}
|
||||
exports.getInputs = getInputs;
|
||||
|
||||
@ -6653,8 +6847,10 @@ const util_1 = __webpack_require__(669);
|
||||
const url_1 = __webpack_require__(835);
|
||||
const perf_hooks_1 = __webpack_require__(630);
|
||||
const status_reporter_1 = __webpack_require__(176);
|
||||
const http_client_1 = __webpack_require__(539);
|
||||
const http_manager_1 = __webpack_require__(452);
|
||||
const upload_gzip_1 = __webpack_require__(647);
|
||||
const requestUtils_1 = __webpack_require__(489);
|
||||
const stat = util_1.promisify(fs.stat);
|
||||
class UploadHttpClient {
|
||||
constructor() {
|
||||
@ -6666,31 +6862,38 @@ class UploadHttpClient {
|
||||
* @param {string} artifactName Name of the artifact being created
|
||||
* @returns The response from the Artifact Service if the file container was successfully created
|
||||
*/
|
||||
createArtifactInFileContainer(artifactName) {
|
||||
createArtifactInFileContainer(artifactName, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const parameters = {
|
||||
Type: 'actions_storage',
|
||||
Name: artifactName
|
||||
};
|
||||
// calculate retention period
|
||||
if (options && options.retentionDays) {
|
||||
const maxRetentionStr = config_variables_1.getRetentionDays();
|
||||
parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr);
|
||||
}
|
||||
const data = JSON.stringify(parameters, null, 2);
|
||||
const artifactUrl = utils_1.getArtifactUrl();
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
|
||||
const client = this.uploadHttpManager.getClient(0);
|
||||
const headers = utils_1.getUploadHeaders('application/json', false);
|
||||
const rawResponse = yield client.post(artifactUrl, data, headers);
|
||||
const body = yield rawResponse.readBody();
|
||||
if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
|
||||
return JSON.parse(body);
|
||||
}
|
||||
else if (utils_1.isForbiddenStatusCode(rawResponse.message.statusCode)) {
|
||||
// if a 403 is returned when trying to create a file container, the customer has exceeded
|
||||
// their storage quota so no new artifact containers can be created
|
||||
throw new Error(`Artifact storage quota has been hit. Unable to upload any new artifacts`);
|
||||
}
|
||||
else {
|
||||
utils_1.displayHttpDiagnostics(rawResponse);
|
||||
throw new Error(`Unable to create a container for the artifact ${artifactName} at ${artifactUrl}`);
|
||||
}
|
||||
// Extra information to display when a particular HTTP code is returned
|
||||
// If a 403 is returned when trying to create a file container, the customer has exceeded
|
||||
// their storage quota so no new artifact containers can be created
|
||||
const customErrorMessages = new Map([
|
||||
[
|
||||
http_client_1.HttpCodes.Forbidden,
|
||||
'Artifact storage quota has been hit. Unable to upload any new artifacts'
|
||||
],
|
||||
[
|
||||
http_client_1.HttpCodes.BadRequest,
|
||||
`The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}`
|
||||
]
|
||||
]);
|
||||
const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages);
|
||||
const body = yield response.readBody();
|
||||
return JSON.parse(body);
|
||||
});
|
||||
}
|
||||
/**
|
||||
@ -6914,12 +7117,12 @@ class UploadHttpClient {
|
||||
this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex);
|
||||
if (retryAfterValue) {
|
||||
core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`);
|
||||
yield new Promise(resolve => setTimeout(resolve, retryAfterValue));
|
||||
yield utils_1.sleep(retryAfterValue);
|
||||
}
|
||||
else {
|
||||
const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount);
|
||||
core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`);
|
||||
yield new Promise(resolve => setTimeout(resolve, backoffTime));
|
||||
yield utils_1.sleep(backoffTime);
|
||||
}
|
||||
core.info(`Finished backoff for retry #${retryCount}, continuing with upload`);
|
||||
return;
|
||||
@ -6971,7 +7174,6 @@ class UploadHttpClient {
|
||||
*/
|
||||
patchArtifactSize(size, artifactName) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const headers = utils_1.getUploadHeaders('application/json', false);
|
||||
const resourceUrl = new url_1.URL(utils_1.getArtifactUrl());
|
||||
resourceUrl.searchParams.append('artifactName', artifactName);
|
||||
const parameters = { Size: size };
|
||||
@ -6979,19 +7181,18 @@ class UploadHttpClient {
|
||||
core.debug(`URL is ${resourceUrl.toString()}`);
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
|
||||
const client = this.uploadHttpManager.getClient(0);
|
||||
const response = yield client.patch(resourceUrl.toString(), data, headers);
|
||||
const body = yield response.readBody();
|
||||
if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
|
||||
core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
|
||||
}
|
||||
else if (response.message.statusCode === 404) {
|
||||
throw new Error(`An Artifact with the name ${artifactName} was not found`);
|
||||
}
|
||||
else {
|
||||
utils_1.displayHttpDiagnostics(response);
|
||||
core.info(body);
|
||||
throw new Error(`Unable to finish uploading artifact ${artifactName} to ${resourceUrl}`);
|
||||
}
|
||||
const headers = utils_1.getUploadHeaders('application/json', false);
|
||||
// Extra information to display when a particular HTTP code is returned
|
||||
const customErrorMessages = new Map([
|
||||
[
|
||||
http_client_1.HttpCodes.NotFound,
|
||||
`An Artifact with the name ${artifactName} was not found`
|
||||
]
|
||||
]);
|
||||
// TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this
|
||||
const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages);
|
||||
yield response.readBody();
|
||||
core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -7314,6 +7515,7 @@ var Inputs;
|
||||
Inputs["Name"] = "name";
|
||||
Inputs["Path"] = "path";
|
||||
Inputs["IfNoFilesFound"] = "if-no-files-found";
|
||||
Inputs["RetentionDays"] = "retention-days";
|
||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||
var NoFileOptions;
|
||||
(function (NoFileOptions) {
|
||||
@ -7410,6 +7612,7 @@ const status_reporter_1 = __webpack_require__(176);
|
||||
const perf_hooks_1 = __webpack_require__(630);
|
||||
const http_manager_1 = __webpack_require__(452);
|
||||
const config_variables_1 = __webpack_require__(401);
|
||||
const requestUtils_1 = __webpack_require__(489);
|
||||
class DownloadHttpClient {
|
||||
constructor() {
|
||||
this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download');
|
||||
@ -7425,13 +7628,9 @@ class DownloadHttpClient {
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
|
||||
const client = this.downloadHttpManager.getClient(0);
|
||||
const headers = utils_1.getDownloadHeaders('application/json');
|
||||
const response = yield client.get(artifactUrl, headers);
|
||||
const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); }));
|
||||
const body = yield response.readBody();
|
||||
if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
|
||||
return JSON.parse(body);
|
||||
}
|
||||
utils_1.displayHttpDiagnostics(response);
|
||||
throw new Error(`Unable to list artifacts for the run. Resource Url ${artifactUrl}`);
|
||||
return JSON.parse(body);
|
||||
});
|
||||
}
|
||||
/**
|
||||
@ -7447,13 +7646,9 @@ class DownloadHttpClient {
|
||||
// use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
|
||||
const client = this.downloadHttpManager.getClient(0);
|
||||
const headers = utils_1.getDownloadHeaders('application/json');
|
||||
const response = yield client.get(resourceUrl.toString(), headers);
|
||||
const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); }));
|
||||
const body = yield response.readBody();
|
||||
if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
|
||||
return JSON.parse(body);
|
||||
}
|
||||
utils_1.displayHttpDiagnostics(response);
|
||||
throw new Error(`Unable to get ContainersItems from ${resourceUrl}`);
|
||||
return JSON.parse(body);
|
||||
});
|
||||
}
|
||||
/**
|
||||
@ -7503,7 +7698,7 @@ class DownloadHttpClient {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
let retryCount = 0;
|
||||
const retryLimit = config_variables_1.getRetryLimit();
|
||||
const destinationStream = fs.createWriteStream(downloadPath);
|
||||
let destinationStream = fs.createWriteStream(downloadPath);
|
||||
const headers = utils_1.getDownloadHeaders('application/json', true, true);
|
||||
// a single GET request is used to download a file
|
||||
const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () {
|
||||
@ -7528,22 +7723,40 @@ class DownloadHttpClient {
|
||||
if (retryAfterValue) {
|
||||
// Back off by waiting the specified time denoted by the retry-after header
|
||||
core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`);
|
||||
yield new Promise(resolve => setTimeout(resolve, retryAfterValue));
|
||||
yield utils_1.sleep(retryAfterValue);
|
||||
}
|
||||
else {
|
||||
// Back off using an exponential value that depends on the retry count
|
||||
const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount);
|
||||
core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`);
|
||||
yield new Promise(resolve => setTimeout(resolve, backoffTime));
|
||||
yield utils_1.sleep(backoffTime);
|
||||
}
|
||||
core.info(`Finished backoff for retry #${retryCount}, continuing with download`);
|
||||
}
|
||||
});
|
||||
const isAllBytesReceived = (expected, received) => {
|
||||
// be lenient, if any input is missing, assume success, i.e. not truncated
|
||||
if (!expected ||
|
||||
!received ||
|
||||
process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) {
|
||||
core.info('Skipping download validation.');
|
||||
return true;
|
||||
}
|
||||
return parseInt(expected) === received;
|
||||
};
|
||||
const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () {
|
||||
destinationStream.close();
|
||||
yield utils_1.rmFile(fileDownloadPath);
|
||||
destinationStream = fs.createWriteStream(fileDownloadPath);
|
||||
});
|
||||
// keep trying to download a file until a retry limit has been reached
|
||||
while (retryCount <= retryLimit) {
|
||||
let response;
|
||||
try {
|
||||
response = yield makeDownloadRequest();
|
||||
if (core.isDebug()) {
|
||||
utils_1.displayHttpDiagnostics(response);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// if an error is caught, it is usually indicative of a timeout so retry the download
|
||||
@ -7554,14 +7767,30 @@ class DownloadHttpClient {
|
||||
yield backOff();
|
||||
continue;
|
||||
}
|
||||
let forceRetry = false;
|
||||
if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
|
||||
// The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string
|
||||
// which can cause some gzip encoded data to be lost
|
||||
// Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents
|
||||
return this.pipeResponseToFile(response, destinationStream, isGzip(response.message.headers));
|
||||
try {
|
||||
const isGzipped = isGzip(response.message.headers);
|
||||
yield this.pipeResponseToFile(response, destinationStream, isGzipped);
|
||||
if (isGzipped ||
|
||||
isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) {
|
||||
return;
|
||||
}
|
||||
else {
|
||||
forceRetry = true;
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
// retry on error, most likely streams were corrupted
|
||||
forceRetry = true;
|
||||
}
|
||||
}
|
||||
else if (utils_1.isRetryableStatusCode(response.message.statusCode)) {
|
||||
if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) {
|
||||
core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`);
|
||||
resetDestinationStream(downloadPath);
|
||||
// if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff
|
||||
utils_1.isThrottledStatusCode(response.message.statusCode)
|
||||
? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers))
|
||||
@ -7587,24 +7816,40 @@ class DownloadHttpClient {
|
||||
if (isGzip) {
|
||||
const gunzip = zlib.createGunzip();
|
||||
response.message
|
||||
.on('error', error => {
|
||||
core.error(`An error occurred while attempting to read the response stream`);
|
||||
gunzip.close();
|
||||
destinationStream.close();
|
||||
reject(error);
|
||||
})
|
||||
.pipe(gunzip)
|
||||
.on('error', error => {
|
||||
core.error(`An error occurred while attempting to decompress the response stream`);
|
||||
destinationStream.close();
|
||||
reject(error);
|
||||
})
|
||||
.pipe(destinationStream)
|
||||
.on('close', () => {
|
||||
resolve();
|
||||
})
|
||||
.on('error', error => {
|
||||
core.error(`An error has been encountered while decompressing and writing a downloaded file to ${destinationStream.path}`);
|
||||
core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
|
||||
reject(error);
|
||||
});
|
||||
}
|
||||
else {
|
||||
response.message
|
||||
.on('error', error => {
|
||||
core.error(`An error occurred while attempting to read the response stream`);
|
||||
destinationStream.close();
|
||||
reject(error);
|
||||
})
|
||||
.pipe(destinationStream)
|
||||
.on('close', () => {
|
||||
resolve();
|
||||
})
|
||||
.on('error', error => {
|
||||
core.error(`An error has been encountered while writing a downloaded file to ${destinationStream.path}`);
|
||||
core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
|
||||
reject(error);
|
||||
});
|
||||
}
|
||||
@ -8137,6 +8382,41 @@ function createEmptyFilesForArtifact(emptyFilesToCreate) {
|
||||
});
|
||||
}
|
||||
exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact;
|
||||
function getFileSize(filePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const stats = yield fs_1.promises.stat(filePath);
|
||||
core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`);
|
||||
return stats.size;
|
||||
});
|
||||
}
|
||||
exports.getFileSize = getFileSize;
|
||||
function rmFile(filePath) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
yield fs_1.promises.unlink(filePath);
|
||||
});
|
||||
}
|
||||
exports.rmFile = rmFile;
|
||||
function getProperRetention(retentionInput, retentionSetting) {
|
||||
if (retentionInput < 0) {
|
||||
throw new Error('Invalid retention, minimum value is 1.');
|
||||
}
|
||||
let retention = retentionInput;
|
||||
if (retentionSetting) {
|
||||
const maxRetention = parseInt(retentionSetting);
|
||||
if (!isNaN(maxRetention) && maxRetention < retention) {
|
||||
core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`);
|
||||
retention = maxRetention;
|
||||
}
|
||||
}
|
||||
return retention;
|
||||
}
|
||||
exports.getProperRetention = getProperRetention;
|
||||
function sleep(milliseconds) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
return new Promise(resolve => setTimeout(resolve, milliseconds));
|
||||
});
|
||||
}
|
||||
exports.sleep = sleep;
|
||||
//# sourceMappingURL=utils.js.map
|
||||
|
||||
/***/ }),
|
||||
@ -8455,12 +8735,11 @@ exports.Pattern = Pattern;
|
||||
/***/ }),
|
||||
|
||||
/***/ 950:
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
/***/ (function(__unusedmodule, exports) {
|
||||
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const url = __webpack_require__(835);
|
||||
function getProxyUrl(reqUrl) {
|
||||
let usingSsl = reqUrl.protocol === 'https:';
|
||||
let proxyUrl;
|
||||
@ -8475,7 +8754,7 @@ function getProxyUrl(reqUrl) {
|
||||
proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
|
||||
}
|
||||
if (proxyVar) {
|
||||
proxyUrl = url.parse(proxyVar);
|
||||
proxyUrl = new URL(proxyVar);
|
||||
}
|
||||
return proxyUrl;
|
||||
}
|
||||
|
3417  package-lock.json  generated

File diff suppressed because it is too large
14  package.json

@@ -28,21 +28,23 @@

     "url": "https://github.com/actions/upload-artifact/issues"
   },
   "homepage": "https://github.com/actions/upload-artifact#readme",
-  "devDependencies": {
-    "@actions/artifact": "^0.3.5",
-    "@actions/core": "^1.2.3",
+  "dependencies": {
+    "@actions/artifact": "^0.5.0",
+    "@actions/core": "^1.2.6",
     "@actions/glob": "^0.1.0",
-    "@actions/io": "^1.0.2",
+    "@actions/io": "^1.0.2"
+  },
+  "devDependencies": {
     "@types/jest": "^25.2.1",
     "@types/node": "^13.11.1",
     "@typescript-eslint/parser": "^2.27.0",
     "@zeit/ncc": "^0.22.1",
     "concurrently": "^5.1.0",
     "eslint": "^7.4.0",
-    "eslint-plugin-github": "^3.4.1",
+    "eslint-plugin-github": "^4.1.1",
     "eslint-plugin-jest": "^23.8.2",
     "glob": "^7.1.6",
-    "jest": "^26.1.0",
+    "jest": "^26.6.3",
     "jest-circus": "^26.1.0",
     "prettier": "^2.0.4",
     "ts-jest": "^25.3.1",
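The `@actions/core` bump to ^1.2.6 corresponds to the `GITHUB_ENV`/`GITHUB_PATH` file-command support visible in the `dist/index.js` hunks above, replacing the deprecated `::set-env`/`::add-path` workflow commands. A minimal sketch of consuming it from action code; the variable name and path are illustrative, and the calling API is unchanged:

```typescript
import * as core from '@actions/core'

// With @actions/core >= 1.2.6 (as bundled here), exportVariable and addPath write to the
// files named by GITHUB_ENV / GITHUB_PATH when those variables are present, and only fall
// back to the deprecated ::set-env / ::add-path commands when they are not.
core.exportVariable('ARTIFACT_NAME', 'my-artifact')
core.addPath('/opt/my-tool/bin')
```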
@@ -1,7 +1,8 @@

 export enum Inputs {
   Name = 'name',
   Path = 'path',
-  IfNoFilesFound = 'if-no-files-found'
+  IfNoFilesFound = 'if-no-files-found',
+  RetentionDays = 'retention-days'
 }

 export enum NoFileOptions {
@@ -22,9 +22,19 @@ export function getInputs(): UploadInputs {

     )
   }

-  return {
+  const inputs = {
     artifactName: name,
     searchPath: path,
     ifNoFilesFound: noFileBehavior
-  }
+  } as UploadInputs
+
+  const retentionDaysStr = core.getInput(Inputs.RetentionDays)
+  if (retentionDaysStr) {
+    inputs.retentionDays = parseInt(retentionDaysStr)
+    if (isNaN(inputs.retentionDays)) {
+      core.setFailed('Invalid retention-days')
+    }
+  }
+
+  return inputs
 }
@@ -31,8 +31,9 @@ async function run(): Promise<void> {

         }
       }
     } else {
+      const s = searchResult.filesToUpload.length === 1 ? '' : 's'
       core.info(
-        `With the provided path, there will be ${searchResult.filesToUpload.length} file(s) uploaded`
+        `With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`
       )
       core.debug(`Root artifact directory is ${searchResult.rootDirectory}`)

@@ -40,6 +41,10 @@ async function run(): Promise<void> {

       const options: UploadOptions = {
         continueOnError: false
       }
+      if (inputs.retentionDays) {
+        options.retentionDays = inputs.retentionDays
+      }

       const uploadResponse = await artifactClient.uploadArtifact(
         inputs.artifactName,
         searchResult.filesToUpload,
@@ -15,4 +15,9 @@ export interface UploadInputs {

    * The desired behavior if no files are found with the provided search path
    */
   ifNoFilesFound: NoFileOptions
+
+  /**
+   * Duration after which artifact will expire in days
+   */
+  retentionDays: number
 }
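Taken together, the source changes above thread the new input from `action.yml` through `getInputs()` and into the upload call. A condensed TypeScript sketch of that flow, assuming the `@actions/artifact@^0.5.0` API used in this diff (`create()`, `uploadArtifact(name, files, rootDirectory, options)`); the artifact name, file list, and root directory are placeholders:

```typescript
import * as core from '@actions/core'
import * as artifact from '@actions/artifact'

async function uploadWithRetention(): Promise<void> {
  // Parse the optional retention-days input, as src/input-helper.ts does above.
  const retentionDaysStr = core.getInput('retention-days')

  // Shape mirrors the UploadOptions used above (continueOnError plus optional retentionDays).
  const options: {continueOnError: boolean; retentionDays?: number} = {
    continueOnError: false
  }
  if (retentionDaysStr) {
    const retentionDays = parseInt(retentionDaysStr)
    if (isNaN(retentionDays)) {
      core.setFailed('Invalid retention-days')
      return
    }
    options.retentionDays = retentionDays
  }

  // Placeholder inputs; the real action derives the file list and root directory from a glob search.
  const client = artifact.create()
  const response = await client.uploadArtifact('my-artifact', ['my_file.txt'], '.', options)

  if (response.failedItems.length > 0) {
    core.setFailed(`${response.failedItems.length} item(s) failed to upload`)
  }
}

uploadWithRetention().catch(error => core.setFailed(error.message))
```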