Compare commits
112 Commits
releases/v ... main
Author | SHA1 | Date |
---|---|---|
Bethany | a8a3f3ad30 | |
Bethany | 7b48769c03 | |
Bethany | 66630398df | |
Bethany | 55e76b779d | |
Luís Ferreira | 65d862660a | |
Luís Ferreira | 0b7f8abb15 | |
Konrad Pabjan | 013d2b89ba | |
Konrad Pabjan | 055b8b3f04 | |
Luís Ferreira | 7a5d4831f7 | |
George Melikov | e0057a5b76 | |
James M. Greene | 7fe6c13ac8 | |
Konrad Pabjan | 83fd05a356 | |
Rob Herley | 3cea537223 | |
Rob Herley | 849aa7758a | |
Rob Herley | 4d3986961d | |
Rob Herley | 2e0d362ec5 | |
Yang Cao | 09a5d6a283 | |
dependabot[bot] | 189315d910 | |
Yang Cao | d159c2d80b | |
dependabot[bot] | c26a7ba4b5 | |
Yang Cao | 6ed6c72922 | |
dependabot[bot] | 2aeee267b2 | |
Yang Cao | 0e2226cf92 | |
dependabot[bot] | f29a5d04a6 | |
Jonathan Tamsut | 6673cd052c | |
Thomas Boop | 2244c82003 | |
Konrad Pabjan | 87348cee5f | |
Konrad Pabjan | 82c141cc51 | |
Konrad Pabjan | da838ae959 | |
Jozef Izso | f4ac36d205 | |
Konrad Pabjan | 5f375cca4b | |
Konrad Pabjan | a009a66585 | |
dependabot[bot] | b9bb65708e | |
Peter Mescalchin | 0b3de3e43b | |
Rob Herley | 54f3f7f081 | |
Rob Herley | f061e61d75 | |
Rob Herley | ee82d6a40e | |
Rob Herley | ecd66f1683 | |
Alberto Gimeno | 37da8e8d08 | |
dependabot[bot] | cd62eed952 | |
Alberto Gimeno | 6fc84b101e | |
Alberto Gimeno | 551bc9bdf8 | |
Brian Cristante | 11e311c8b5 | |
Brian Cristante | 317b58f0d1 | |
Brian Cristante | 8d06e6c094 | |
Brian Cristante | 27121b0bdf | |
dependabot[bot] | 4537e112f4 | |
Brian Cristante | 2368feccd5 | |
dependabot[bot] | 52a3c6b975 | |
dependabot[bot] | 76f4433885 | |
Brian Cristante | 46426468d3 | |
Brian Cristante | 8507687653 | |
Brian Cristante | 0482dbbe7a | |
Brian Cristante | 58518184d2 | |
Jason Gross | 4db8255e70 | |
Robert Cannon | 9243a41f97 | |
Remy Kabel | 9dc0ee81a2 | |
dependabot[bot] | 2e3d6da508 | |
Konrad Pabjan | ee69f02b3d | |
dependabot[bot] | bcd44ad93d | |
Yang Cao | ea3d524381 | |
Yang Cao | e95d1b9c8d | |
Yang Cao | 0fbc4b771a | |
Robin Neatherway | a1af2e8f43 | |
Konrad Pabjan | a4f98af3fa | |
Konrad Pabjan | 16b8b2b2e7 | |
Konrad Pabjan | e448a9b857 | |
Hugo van Kemenade | e6bd6b7749 | |
Josh Gross | 1fd4c858f9 | |
Josh Gross | a2af908e3a | |
Brian Cristante | 928d1a16d9 | |
Konrad Pabjan | e4a7ffadfc | |
Brian Westphal | 726a6dcd01 | |
Josh Gross | 3db166e2ea | |
Josh Gross | d86048c66c | |
Josh Gross | 328d69042a | |
Yang Cao | 27bce4eee7 | |
Thomas Boop | f8b42f7ab4 | |
Thomas Boop | 2106e8cf10 | |
Thomas Boop | db66798ebc | |
Thomas Boop | d359fd0772 | |
Thomas Boop | 350822c32f | |
Justin Weissig | abecf4abf4 | |
Yang Cao | 604e071d21 | |
Yang Cao | 4560c23b39 | |
Yang Cao | 59018c2f85 | |
Konrad Pabjan | 58740802ef | |
Thomas Boop | f109393e79 | |
Konrad Pabjan | 268d754764 | |
Konrad Pabjan | c8879bf5ae | |
Konrad Pabjan | 5ba29a7d5b | |
Konrad Pabjan | 5f948bc1f0 | |
dependabot[bot] | 589ca5fbdd | |
Konrad Pabjan | 8ec57c93cb | |
Konrad Pabjan | ee5fe7718d | |
Konrad Pabjan | 0c366cb4fc | |
Konrad Pabjan | 63d6076e6f | |
Konrad Pabjan | ebad382c09 | |
Konrad Pabjan | 90f03bd03e | |
Konrad Pabjan | f265ac5693 | |
Konrad Pabjan | 4347a0d55a | |
Konrad Pabjan | 97b7dace6c | |
Nick Schonning | 9bcc1e21d0 | |
Josh Gross | f729db2261 | |
Josh Gross | d6636db5bd | |
Josh Gross | 1c797a4e6c | |
dependabot[bot] | 4a30538ab4 | |
Konrad Pabjan | e59920f5cf | |
Konrad Pabjan | e7eefc4917 | |
Manolo Edge | 1283ca12b6 | |
Konrad Pabjan | 8eb149d680 | |
Konrad Pabjan | ceafd8bc4a |

@ -0,0 +1,6 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/typescript-node
{
  "name": "@actions/upload-artifact",
  "image": "mcr.microsoft.com/devcontainers/typescript-node:0-16"
}

@ -0,0 +1,3 @@
node_modules/
lib/
dist/

@ -0,0 +1,16 @@
{
  "env": { "node": true, "jest": true },
  "parser": "@typescript-eslint/parser",
  "parserOptions": { "ecmaVersion": 9, "sourceType": "module" },
  "extends": [
    "eslint:recommended",
    "plugin:import/errors",
    "plugin:import/warnings",
    "plugin:import/typescript",
    "plugin:prettier/recommended"
  ],
  "rules": {
    "@typescript-eslint/no-empty-function": "off"
  },
  "plugins": ["@typescript-eslint", "jest"]
}

@ -0,0 +1,2 @@
* text=auto eol=lf
.licenses/** -diff linguist-generated=true

@ -0,0 +1 @@
* @actions/artifacts-actions

@ -0,0 +1,63 @@
name: "🐛 Bug report"
description: Let us know about a bug!
labels: ['bug']
title: '[bug]'
body:
  - type: textarea
    id: problem
    attributes:
      label: What happened?
      description: |
        Please provide a clear and concise description of what the bug is. If applicable, add screenshots to help explain your problem.
    validations:
      required: true

  - type: textarea
    id: expected
    attributes:
      label: What did you expect to happen?
    validations:
      required: true

  - type: textarea
    id: repro
    attributes:
      label: How can we reproduce it?
      description: |
        Please be as minimal and precise as possible. If your repo/run is public, please include a URL so it is easier for us to investigate.
    validations:
      required: true

  - type: textarea
    id: additional
    attributes:
      label: Anything else we need to know?

  - type: input
    id: version
    attributes:
      label: What version of the action are you using?
      placeholder: vX.Y.Z
      description: |
        Please check the documentation first since different major versions can have different behaviors.
    validations:
      required: true

  - type: dropdown
    id: environment
    attributes:
      label: What are your runner environments?
      multiple: true
      options:
        - self-hosted
        - linux
        - windows
        - macos
    validations:
      required: true

  - type: input
    id: ghes
    attributes:
      label: Are you on GitHub Enterprise Server? If so, what version?
      placeholder: vX.Y

@ -0,0 +1,5 @@
blank_issues_enabled: false
contact_links:
  - name: 🙋 Ask a question
    url: https://github.community/c/code-to-cloud/52
    about: Please ask and answer questions on GitHub Support Community.

@ -0,0 +1,28 @@
name: "📚 Documentation issues"
description: Make a suggestion to improve the documentation!
labels: ['documentation']
title: '[docs]'
body:
  - type: markdown
    attributes:
      value: |
        ❗ This is only for documentation updates for files in this repo, i.e. `README.md`.

        If you want to suggest changes for the [GitHub Docs](https://docs.github.com/), please [open an issue there](https://github.com/github/docs/issues/new/choose).
  - type: textarea
    id: affected
    attributes:
      label: What files would you like to change?
      description: |
        Please provide permalinks to the specified files and line numbers.
    validations:
      required: true

  - type: textarea
    id: suggested
    attributes:
      label: What are your suggested changes?
      description: |
        Give as much detail as you can to help us understand the changes you want to see.
    validations:
      required: true

@ -0,0 +1,20 @@
name: "🎁 Feature request"
description: Suggest a new feature/enhancement!
labels: ['enhancement']
title: '[feat req]'
body:
  - type: textarea
    id: feature
    attributes:
      label: What would you like to be added?
      description: |
        Please check existing issues to avoid making duplicates. Any duplicate issue will be closed immediately.
    validations:
      required: true

  - type: textarea
    id: reasoning
    attributes:
      label: Why is this needed?
    validations:
      required: true

@ -0,0 +1,52 @@
# `dist/index.js` is a special file in Actions.
# When you reference an action with `uses:` in a workflow,
# `index.js` is the code that will run.
# For our project, we generate this file through a build process
# from other source files.
# We need to make sure the checked-in `index.js` actually matches what we expect it to be.
name: Check dist/

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  workflow_dispatch:

jobs:
  check-dist:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v3

      - name: Setup Node 16
        uses: actions/setup-node@v3
        with:
          node-version: 16.x
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Move the committed index.js file
        run: mv dist/index.js /tmp

      - name: Rebuild with tsc
        run: npm run build

      - name: Rebuild the index.js file
        run: npm run release

      - name: Compare the expected and actual index.js files
        run: git diff --ignore-all-space dist/index.js /tmp/index.js
        id: diff

      # If index.js was different than expected, upload the expected version as an artifact
      - uses: actions/upload-artifact@v3
        if: ${{ failure() && steps.diff.conclusion == 'failure' }}
        with:
          name: index.js
          path: dist/index.js

@ -0,0 +1,46 @@
name: "Code scanning - action"

on:
  push:
    branches-ignore: "dependabot/**"
  pull_request:
    paths-ignore:
      - '**.md'
  schedule:
    - cron: '0 6 * * 3'

jobs:
  CodeQL-Build:

    # CodeQL runs on ubuntu-latest and windows-latest
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      # and modify them (or add more) to build your code if your project
      # uses a compiled language

      #- run: |
      #    make bootstrap
      #    make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2

@ -0,0 +1,24 @@
name: Licensed

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  test:
    runs-on: ubuntu-latest
    name: Check licenses
    steps:
      - uses: actions/checkout@v2
      - run: npm ci
      - name: Install licensed
        run: |
          cd $RUNNER_TEMP
          curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/2.12.2/licensed-2.12.2-linux-x64.tar.gz
          sudo tar -xzf licensed.tar.gz
          sudo mv licensed /usr/local/bin/licensed
      - run: licensed status

@ -0,0 +1,28 @@
name: Release new action version
on:
  release:
    types: [released]
  workflow_dispatch:
    inputs:
      TAG_NAME:
        description: 'Tag name that the major tag will point to'
        required: true

env:
  TAG_NAME: ${{ github.event.inputs.TAG_NAME || github.event.release.tag_name }}
permissions:
  contents: write

jobs:
  update_tag:
    name: Update the major tag to include the ${{ github.event.inputs.TAG_NAME || github.event.release.tag_name }} changes
    environment:
      name: releaseNewActionVersion
    runs-on: ubuntu-latest
    steps:
      - name: Update the ${{ env.TAG_NAME }} tag
        id: update-major-tag
        uses: actions/publish-action@v0.2.1
        with:
          source-tag: ${{ env.TAG_NAME }}
          slack-webhook: ${{ secrets.SLACK_WEBHOOK }}

@ -0,0 +1,169 @@
name: Test
on:
  push:
    branches:
      - main
    paths-ignore:
      - '**.md'
  pull_request:
    paths-ignore:
      - '**.md'

jobs:

  build:
    name: Build

    strategy:
      matrix:
        runs-on: [ubuntu-latest, macos-latest, windows-latest]
      fail-fast: false

    runs-on: ${{ matrix.runs-on }}

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Setup Node 16
        uses: actions/setup-node@v3
        with:
          node-version: 16.x
          cache: 'npm'

      - name: Install dependencies
        run: npm ci

      - name: Compile
        run: npm run build

      - name: Lint
        run: npm run lint

      - name: Format
        run: npm run format-check

      - name: Test
        run: npm run test

      # Test end-to-end by uploading two artifacts and then downloading them
      - name: Create artifact files
        run: |
          mkdir -p path/to/dir-1
          mkdir -p path/to/dir-2
          mkdir -p path/to/dir-3
          echo "Lorem ipsum dolor sit amet" > path/to/dir-1/file1.txt
          echo "Hello world from file #2" > path/to/dir-2/file2.txt
          echo "This is a going to be a test for a large enough file that should get compressed with GZip. The @actions/artifact package uses GZip to upload files. This text should have a compression ratio greater than 100% so it should get uploaded using GZip" > path/to/dir-3/gzip.txt

      # Upload a single file artifact
      - name: 'Upload artifact #1'
        uses: ./
        with:
          name: 'Artifact-A'
          path: path/to/dir-1/file1.txt

      # Upload using a wildcard pattern, name should default to 'artifact' if not provided
      - name: 'Upload artifact #2'
        uses: ./
        with:
          path: path/**/dir*/

      # Upload a directory that contains a file that will be uploaded with GZip
      - name: 'Upload artifact #3'
        uses: ./
        with:
          name: 'GZip-Artifact'
          path: path/to/dir-3/

      # Upload using multiple paths and exclusions
      - name: 'Upload artifact #4'
        uses: ./
        with:
          name: 'Multi-Path-Artifact'
          path: |
            path/to/dir-1/*
            path/to/dir-[23]/*
            !path/to/dir-3/*.txt

      # Download Artifact #1 and verify the correctness of the content
      - name: 'Download artifact #1'
        uses: actions/download-artifact@v3
        with:
          name: 'Artifact-A'
          path: some/new/path

      - name: 'Verify Artifact #1'
        run: |
          $file = "some/new/path/file1.txt"
          if(!(Test-Path -path $file))
          {
              Write-Error "Expected file does not exist"
          }
          if(!((Get-Content $file) -ceq "Lorem ipsum dolor sit amet"))
          {
              Write-Error "File contents of downloaded artifact are incorrect"
          }
        shell: pwsh

      # Download Artifact #2 and verify the correctness of the content
      - name: 'Download artifact #2'
        uses: actions/download-artifact@v3
        with:
          name: 'artifact'
          path: some/other/path

      - name: 'Verify Artifact #2'
        run: |
          $file1 = "some/other/path/to/dir-1/file1.txt"
          $file2 = "some/other/path/to/dir-2/file2.txt"
          if(!(Test-Path -path $file1) -or !(Test-Path -path $file2))
          {
              Write-Error "Expected files do not exist"
          }
          if(!((Get-Content $file1) -ceq "Lorem ipsum dolor sit amet") -or !((Get-Content $file2) -ceq "Hello world from file #2"))
          {
              Write-Error "File contents of downloaded artifacts are incorrect"
          }
        shell: pwsh

      # Download Artifact #3 and verify the correctness of the content
      - name: 'Download artifact #3'
        uses: actions/download-artifact@v3
        with:
          name: 'GZip-Artifact'
          path: gzip/artifact/path

      # Because a directory was used as input during the upload, the parent directories (path/to/dir-3/) should not be included in the uploaded artifact
      - name: 'Verify Artifact #3'
        run: |
          $gzipFile = "gzip/artifact/path/gzip.txt"
          if(!(Test-Path -path $gzipFile))
          {
              Write-Error "Expected file does not exist"
          }
          if(!((Get-Content $gzipFile) -ceq "This is a going to be a test for a large enough file that should get compressed with GZip. The @actions/artifact package uses GZip to upload files. This text should have a compression ratio greater than 100% so it should get uploaded using GZip"))
          {
              Write-Error "File contents of downloaded artifact are incorrect"
          }
        shell: pwsh

      - name: 'Download artifact #4'
        uses: actions/download-artifact@v3
        with:
          name: 'Multi-Path-Artifact'
          path: multi/artifact

      - name: 'Verify Artifact #4'
        run: |
          $file1 = "multi/artifact/dir-1/file1.txt"
          $file2 = "multi/artifact/dir-2/file2.txt"
          if(!(Test-Path -path $file1) -or !(Test-Path -path $file2))
          {
              Write-Error "Expected files do not exist"
          }
          if(!((Get-Content $file1) -ceq "Lorem ipsum dolor sit amet") -or !((Get-Content $file2) -ceq "Hello world from file #2"))
          {
              Write-Error "File contents of downloaded artifacts are incorrect"
          }
        shell: pwsh

@ -0,0 +1,3 @@
node_modules/
lib/
__tests__/_temp/

@ -0,0 +1,15 @@
sources:
  npm: true

allowed:
  - apache-2.0
  - bsd-2-clause
  - bsd-3-clause
  - isc
  - mit
  - cc0-1.0
  - unlicense

reviewed:
  npm:
    - fs.realpath
Binary files not shown (21 files).

@ -0,0 +1,3 @@
dist/
lib/
node_modules/

@ -0,0 +1,11 @@
{
  "printWidth": 80,
  "tabWidth": 2,
  "useTabs": false,
  "semi": false,
  "singleQuote": true,
  "trailingComma": "none",
  "bracketSpacing": false,
  "arrowParens": "avoid",
  "parser": "typescript"
}

@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.

## Scope

This Code of Conduct applies within all project spaces, and it also applies when an individual is representing the project or its community in public spaces. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at opensource@github.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq

@ -0,0 +1,56 @@
## Contributing

[fork]: https://github.com/actions/upload-artifact/fork
[pr]: https://github.com/actions/upload-artifact/compare
[style]: https://github.com/styleguide/js
[code-of-conduct]: CODE_OF_CONDUCT.md

Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great.

Contributions to this project are [released](https://help.github.com/articles/github-terms-of-service/#6-contributions-under-repository-license) to the public under the [project's open source license](LICENSE).

Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.

## Found a bug?

- **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/actions/upload-artifact/issues).
- If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/actions/upload-artifact/issues/new). Be sure to include a **title and clear description**, as much relevant information as possible, and a **code sample** or a **reproducible test case** demonstrating the expected behavior that is not occurring.
- If possible, use the relevant bug report templates to create the issue.

## What should I know before submitting a pull request or issue?

The code related to `upload-artifact` is split between this repository and [actions/toolkit](https://github.com/actions/toolkit), where the `@actions/artifact` npm package is housed. The npm package contains the core functionality for interacting with artifacts. Any extra functionality on top of interacting with the APIs, such as search, lives in this repository.

Artifact related issues will be tracked in this repository, so please do not open duplicate issues in `actions/toolkit`.

## Submitting a pull request

1. [Fork][fork] and clone the repository
2. Configure and install the dependencies: `npm install`
3. Make sure the tests pass on your machine: `npm run test`
4. Create a new branch: `git checkout -b my-branch-name`
5. Make your change, add tests, and make sure the tests still pass
6. Make sure your code is correctly formatted: `npm run format`
7. Make sure your code passes linting: `npm run lint`
8. Update `dist/index.js` using `npm run release`. This creates a single JavaScript file that is used as the entry-point for the action
9. Push to your fork and [submit a pull request][pr]
10. Pat yourself on the back and wait for your pull request to be reviewed and merged.

Here are a few things you can do that will increase the likelihood of your pull request being accepted:

- Write tests.
- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
- Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

## Licensed

This repository uses a tool called [Licensed](https://github.com/github/licensed) to verify third party dependencies. You may need to locally install Licensed and run `licensed cache` to update the dependency cache if you install or update a production dependency. If `licensed cache` is unable to determine the dependency, you may need to modify the cache file yourself to put the correct license. You should still verify the dependency: Licensed is a tool to help, but it is not a substitute for human review of dependencies.

## Resources

- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
- [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
- [GitHub Help](https://help.github.com)

Thanks! :heart: :heart: :heart:

GitHub Actions Team :octocat:

README.md
@ -1,44 +1,293 @@
# Upload-Artifact v3

This uploads artifacts from your workflow, allowing you to share data between jobs and store data once a workflow is complete.

See also [download-artifact](https://github.com/actions/download-artifact).

# What's new

- Easier upload
  - Specify a wildcard pattern
  - Specify an individual file
  - Specify a directory (previously you were limited to only this option)
- Multi path upload
  - Use a combination of individual files, wildcards or directories
  - Support for excluding certain files
- Upload an artifact without providing a name
- Fix for artifact uploads sometimes not working with containers
- Proxy support out of the box
- Port of the entire action to TypeScript from a runner plugin so it is easier to collaborate and accept contributions

Refer [here](https://github.com/actions/upload-artifact/tree/releases/v1) for the previous version.

# Usage

See [action.yml](action.yml)

### Upload an Individual File

```yaml
steps:
- uses: actions/checkout@v3

- run: mkdir -p path/to/artifact

- run: echo hello > path/to/artifact/world.txt

- uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: path/to/artifact/world.txt
```

### Upload an Entire Directory

```yaml
- uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: path/to/artifact/ # or path/to/artifact
```

### Upload using a Wildcard Pattern

```yaml
- uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: path/**/[abc]rtifac?/*
```

### Upload using Multiple Paths and Exclusions

```yaml
- uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: |
      path/output/bin/
      path/output/test-results
      !path/**/*.tmp
```

For supported wildcards along with behavior and documentation, see [@actions/glob](https://github.com/actions/toolkit/tree/main/packages/glob), which is used internally to search for files.

If a wildcard pattern is used, the path hierarchy will be preserved after the first wildcard pattern:

```
path/to/*/directory/foo?.txt =>
    ∟ path/to/some/directory/foo1.txt
    ∟ path/to/some/directory/foo2.txt
    ∟ path/to/other/directory/foo1.txt

would be flattened and uploaded as =>
    ∟ some/directory/foo1.txt
    ∟ some/directory/foo2.txt
    ∟ other/directory/foo1.txt
```

If multiple paths are provided as input, the least common ancestor of all the search paths will be used as the root directory of the artifact. Exclude paths do not affect the directory structure.
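
For illustration, a minimal sketch (the paths here are hypothetical): with the two search paths below, their least common ancestor `path/` becomes the artifact root, so the artifact would contain `output/bin/**` and `to/test-results/**`.

```yaml
- uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: |
      path/output/bin/
      path/to/test-results/
```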

Relative and absolute file paths are both allowed. Relative paths are rooted against the current working directory. Paths that begin with a wildcard character should be quoted to avoid being interpreted as YAML aliases.
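
For example, a sketch of a quoted wildcard path (the pattern itself is only illustrative):

```yaml
- uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: '**/*.log' # quoted so the leading * is not interpreted as YAML alias syntax
```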

The [@actions/artifact](https://github.com/actions/toolkit/tree/main/packages/artifact) package is used internally to handle most of the logic around uploading an artifact. There is extra documentation around upload limitations and behavior in the toolkit repo that is worth checking out.

### Customization if no files are found

If a path (or paths) result in no files being found for the artifact, the action will succeed but print a warning. In certain scenarios it may be desirable to fail the action or suppress the warning. The `if-no-files-found` option allows you to customize the behavior of the action if no files are found:

```yaml
- uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: path/to/artifact/
    if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
```

### Conditional Artifact Upload

To upload artifacts only when the previous step of a job failed, use [`if: failure()`](https://help.github.com/en/articles/contexts-and-expression-syntax-for-github-actions#job-status-check-functions):

```yaml
- uses: actions/upload-artifact@v3
  if: failure()
  with:
    name: my-artifact
    path: path/to/artifact/
```

### Uploading without an artifact name

You can upload an artifact without specifying a name:

```yaml
- uses: actions/upload-artifact@v3
  with:
    path: path/to/artifact/world.txt
```

If not provided, `artifact` will be used as the default name, which will manifest itself in the UI after upload.

### Uploading to the same artifact

With the following example, the available artifact (named `artifact` by default if no name is provided) would contain both `world.txt` (`hello`) and `extra-file.txt` (`howdy`):

```yaml
- run: echo hi > world.txt
- uses: actions/upload-artifact@v3
  with:
    path: world.txt

- run: echo howdy > extra-file.txt
- uses: actions/upload-artifact@v3
  with:
    path: extra-file.txt

- run: echo hello > world.txt
- uses: actions/upload-artifact@v3
  with:
    path: world.txt
```

Each artifact behaves as a file share. Uploading to the same artifact multiple times in the same workflow can overwrite and append already uploaded files:

```yaml
strategy:
  matrix:
    node-version: [8.x, 10.x, 12.x, 13.x]
steps:
  - name: Create a file
    run: echo ${{ matrix.node-version }} > my_file.txt
  - name: Accidentally upload to the same artifact via multiple jobs
    uses: actions/upload-artifact@v3
    with:
      name: my-artifact
      path: ${{ github.workspace }}
```

> **_Warning:_** Be careful when uploading to the same artifact via multiple jobs, as artifacts may become corrupted. When uploading a file with an identical name and path in multiple jobs, uploads may fail with 503 errors due to conflicting uploads happening at the same time. Ensure that uploads to identical locations do not interfere with each other.

In the above example, four jobs will upload four different files to the same artifact, but there will only be one file available when `my-artifact` is downloaded. Each job overwrites what was previously uploaded. To ensure that jobs don't overwrite existing artifacts, use a different name per job:

```yaml
  uses: actions/upload-artifact@v3
  with:
    name: my-artifact ${{ matrix.node-version }}
    path: ${{ github.workspace }}
```

### Environment Variables and Tilde Expansion

You can use `~` in the path input as a substitute for `$HOME`. Basic tilde expansion is supported:

```yaml
- run: |
    mkdir -p ~/new/artifact
    echo hello > ~/new/artifact/world.txt
- uses: actions/upload-artifact@v3
  with:
    name: Artifacts-V3
    path: ~/new/**/*
```

Environment variables along with context expressions can also be used for input. For documentation see [context and expression syntax](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions):

```yaml
env:
  name: my-artifact
steps:
- run: |
    mkdir -p ${{ github.workspace }}/artifact
    echo hello > ${{ github.workspace }}/artifact/world.txt
- uses: actions/upload-artifact@v3
  with:
    name: ${{ env.name }}-name
    path: ${{ github.workspace }}/artifact/**/*
```

For environment variables created in other steps, make sure to use the `env` expression syntax:

```yaml
steps:
- run: |
    mkdir testing
    echo "This is a file to upload" > testing/file.txt
    echo "artifactPath=testing/file.txt" >> $GITHUB_ENV
- uses: actions/upload-artifact@v3
  with:
    name: artifact
    path: ${{ env.artifactPath }} # this will resolve to testing/file.txt at runtime
```

### Retention Period

Artifacts are retained for 90 days by default. You can specify a shorter retention period using the `retention-days` input:

```yaml
- name: Create a file
  run: echo "I won't live long" > my_file.txt

- name: Upload Artifact
  uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: my_file.txt
    retention-days: 5
```

The retention period must be between 1 and 90 inclusive. For more information see [artifact and log retention policies](https://docs.github.com/en/free-pro-team@latest/actions/reference/usage-limits-billing-and-administration#artifact-and-log-retention-policy).

## Where does the upload go?

At the bottom of the workflow summary page, there is a dedicated section for artifacts. Here's a screenshot of something you might see:

<img src="https://user-images.githubusercontent.com/16109154/103645952-223c6880-4f59-11eb-8268-8dca6937b5f9.png" width="700" height="300">

There is a trashcan icon that can be used to delete the artifact. This icon will only appear for users who have write permissions to the repository.

The size of the artifact is denoted in bytes. The displayed artifact size is the raw uploaded size (the sum of all the individual files uploaded during the workflow run for the artifact), not the compressed size. When you click to download an artifact from the summary page, a compressed zip is created with all the contents of the artifact, and the size of the zip you download may differ significantly from the displayed size. Billing is based on the raw uploaded size, not the size of the zip.

# Limitations

### Zipped Artifact Downloads

During a workflow run, files are uploaded and downloaded individually using the `upload-artifact` and `download-artifact` actions. However, when a workflow run finishes and an artifact is downloaded from either the UI or through the [download api](https://developer.github.com/v3/actions/artifacts/#download-an-artifact), a zip is dynamically created with all the file contents that were uploaded. There is currently no way to download artifacts after a workflow run finishes in a format other than a zip, or to download artifact contents individually. One consequence of this limitation is that if a zip is uploaded during a workflow run and then downloaded from the UI, there will be a double zip created.

### Permission Loss

:exclamation: File permissions are not maintained during artifact upload :exclamation: For example, if you make a file executable using `chmod` and then upload that file, post-download the file is no longer guaranteed to be set as an executable.

### Case Insensitive Uploads

:exclamation: File uploads are case insensitive :exclamation: If you upload `A.txt` and `a.txt` with the same root path, only a single file will be saved and available during download.

### Maintaining file permissions and case sensitive files

If file permissions and case sensitivity are required, you can `tar` all of your files together before artifact upload. Post download, the `tar` file will maintain file permissions and case sensitivity:

```yaml
- name: Tar files
  run: tar -cvf my_files.tar /path/to/my/directory

- name: Upload Artifact
  uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: my_files.tar
```
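
As a rough sketch of the other side (the download location and tar flags are illustrative, not from this repository), the files could be restored in a later job like this:

```yaml
- name: Download Artifact
  uses: actions/download-artifact@v3
  with:
    name: my-artifact

- name: Extract files
  run: tar -xvf my_files.tar
```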

### Too many uploads resulting in 429 responses

A very small subset of users who upload a very large number of artifacts in a short period of time may see their uploads throttled or fail because of `Request was blocked due to exceeding usage of resource 'DBCPU' in namespace` or `Unable to copy file to server StatusCode=TooManyRequests`.

To reduce the chance of this happening, you can reduce the number of HTTP calls made during artifact upload by zipping or archiving the contents of your artifact before an upload starts. As an example, imagine an artifact with 1000 files (each 10 Kb in size). Without any modification, there would be around 1000 HTTP calls made to upload the artifact. If you zip or archive the artifact beforehand, the number of HTTP calls can be dropped to single digit territory. Measures like this will significantly speed up your upload and prevent uploads from being throttled or, in some cases, failing.
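
As an illustrative sketch (assuming the `zip` utility is available on the runner; the paths are hypothetical), the archive-then-upload approach looks like this. Keep in mind the zipped-download limitation above: downloading such an artifact from the UI yields a zip that itself contains `my-artifact.zip`.

```yaml
- name: Zip artifact contents
  run: zip -r my-artifact.zip path/to/artifact/

- name: Upload Artifact
  uses: actions/upload-artifact@v3
  with:
    name: my-artifact
    path: my-artifact.zip
```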

## Additional Documentation

See [Storing workflow data as artifacts](https://docs.github.com/en/actions/advanced-guides/storing-workflow-data-as-artifacts) for additional examples and tips.

See extra documentation for the [@actions/artifact](https://github.com/actions/toolkit/blob/main/packages/artifact/docs/additional-information.md) package that is used internally regarding certain behaviors and limitations.

# License

The scripts and documentation in this project are released under the [MIT License](LICENSE).

@ -0,0 +1,355 @@
import * as core from '@actions/core'
import * as path from 'path'
import * as io from '@actions/io'
import {promises as fs} from 'fs'
import {findFilesToUpload} from '../src/search'

const root = path.join(__dirname, '_temp', 'search')
const searchItem1Path = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'search-item1.txt'
)
const searchItem2Path = path.join(root, 'folder-d', 'search-item2.txt')
const searchItem3Path = path.join(root, 'folder-d', 'search-item3.txt')
const searchItem4Path = path.join(root, 'folder-d', 'search-item4.txt')
const searchItem5Path = path.join(root, 'search-item5.txt')
const extraSearchItem1Path = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'extraSearch-item1.txt'
)
const extraSearchItem2Path = path.join(
  root,
  'folder-d',
  'extraSearch-item2.txt'
)
const extraSearchItem3Path = path.join(
  root,
  'folder-f',
  'extraSearch-item3.txt'
)
const extraSearchItem4Path = path.join(
  root,
  'folder-h',
  'folder-i',
  'extraSearch-item4.txt'
)
const extraSearchItem5Path = path.join(
  root,
  'folder-h',
  'folder-i',
  'extraSearch-item5.txt'
)
const extraFileInFolderCPath = path.join(
  root,
  'folder-a',
  'folder-b',
  'folder-c',
  'extra-file-in-folder-c.txt'
)
const amazingFileInFolderHPath = path.join(root, 'folder-h', 'amazing-item.txt')
const lonelyFilePath = path.join(
  root,
  'folder-h',
  'folder-j',
  'folder-k',
  'lonely-file.txt'
)

describe('Search', () => {
  beforeAll(async () => {
    // mock all output so that there is less noise when running tests
    jest.spyOn(console, 'log').mockImplementation(() => {})
    jest.spyOn(core, 'debug').mockImplementation(() => {})
    jest.spyOn(core, 'info').mockImplementation(() => {})
    jest.spyOn(core, 'warning').mockImplementation(() => {})

    // clear temp directory
    await io.rmRF(root)
    await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-c'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-a', 'folder-b', 'folder-e'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-d'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-f'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-g'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-h', 'folder-i'), {
      recursive: true
    })
    await fs.mkdir(path.join(root, 'folder-h', 'folder-j', 'folder-k'), {
      recursive: true
    })

    await fs.writeFile(searchItem1Path, 'search item1 file')
    await fs.writeFile(searchItem2Path, 'search item2 file')
    await fs.writeFile(searchItem3Path, 'search item3 file')
    await fs.writeFile(searchItem4Path, 'search item4 file')
    await fs.writeFile(searchItem5Path, 'search item5 file')

    await fs.writeFile(extraSearchItem1Path, 'extraSearch item1 file')
    await fs.writeFile(extraSearchItem2Path, 'extraSearch item2 file')
    await fs.writeFile(extraSearchItem3Path, 'extraSearch item3 file')
    await fs.writeFile(extraSearchItem4Path, 'extraSearch item4 file')
    await fs.writeFile(extraSearchItem5Path, 'extraSearch item5 file')

    await fs.writeFile(extraFileInFolderCPath, 'extra file')

    await fs.writeFile(amazingFileInFolderHPath, 'amazing file')

    await fs.writeFile(lonelyFilePath, 'all by itself')
    /*
      Directory structure of files that get created:
      root/
        folder-a/
          folder-b/
            folder-c/
              search-item1.txt
              extraSearch-item1.txt
              extra-file-in-folder-c.txt
            folder-e/
        folder-d/
          search-item2.txt
          search-item3.txt
          search-item4.txt
          extraSearch-item2.txt
        folder-f/
          extraSearch-item3.txt
        folder-g/
        folder-h/
          amazing-item.txt
          folder-i/
            extraSearch-item4.txt
            extraSearch-item5.txt
          folder-j/
            folder-k/
              lonely-file.txt
        search-item5.txt
    */
  })

  it('Single file search - Absolute Path', async () => {
    const searchResult = await findFilesToUpload(extraFileInFolderCPath)
    expect(searchResult.filesToUpload.length).toEqual(1)
    expect(searchResult.filesToUpload[0]).toEqual(extraFileInFolderCPath)
    expect(searchResult.rootDirectory).toEqual(
      path.join(root, 'folder-a', 'folder-b', 'folder-c')
    )
  })

  it('Single file search - Relative Path', async () => {
    const relativePath = path.join(
      '__tests__',
      '_temp',
      'search',
      'folder-a',
      'folder-b',
      'folder-c',
      'search-item1.txt'
    )

    const searchResult = await findFilesToUpload(relativePath)
    expect(searchResult.filesToUpload.length).toEqual(1)
    expect(searchResult.filesToUpload[0]).toEqual(searchItem1Path)
    expect(searchResult.rootDirectory).toEqual(
      path.join(root, 'folder-a', 'folder-b', 'folder-c')
    )
  })

  it('Single file using wildcard', async () => {
    const expectedRoot = path.join(root, 'folder-h')
    const searchPath = path.join(root, 'folder-h', '**/*lonely*')
    const searchResult = await findFilesToUpload(searchPath)
    expect(searchResult.filesToUpload.length).toEqual(1)
    expect(searchResult.filesToUpload[0]).toEqual(lonelyFilePath)
    expect(searchResult.rootDirectory).toEqual(expectedRoot)
  })

  it('Single file using directory', async () => {
    const searchPath = path.join(root, 'folder-h', 'folder-j')
    const searchResult = await findFilesToUpload(searchPath)
    expect(searchResult.filesToUpload.length).toEqual(1)
    expect(searchResult.filesToUpload[0]).toEqual(lonelyFilePath)
    expect(searchResult.rootDirectory).toEqual(searchPath)
  })

  it('Directory search - Absolute Path', async () => {
    const searchPath = path.join(root, 'folder-h')
    const searchResult = await findFilesToUpload(searchPath)
    expect(searchResult.filesToUpload.length).toEqual(4)

    expect(
      searchResult.filesToUpload.includes(amazingFileInFolderHPath)
    ).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem4Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem5Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(lonelyFilePath)).toEqual(true)

    expect(searchResult.rootDirectory).toEqual(searchPath)
  })

  it('Directory search - Relative Path', async () => {
    const searchPath = path.join('__tests__', '_temp', 'search', 'folder-h')
    const expectedRootDirectory = path.join(root, 'folder-h')
    const searchResult = await findFilesToUpload(searchPath)
    expect(searchResult.filesToUpload.length).toEqual(4)

    expect(
      searchResult.filesToUpload.includes(amazingFileInFolderHPath)
    ).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem4Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem5Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(lonelyFilePath)).toEqual(true)

    expect(searchResult.rootDirectory).toEqual(expectedRootDirectory)
  })

  it('Wildcard search - Absolute Path', async () => {
    const searchPath = path.join(root, '**/*[Ss]earch*')
    const searchResult = await findFilesToUpload(searchPath)
    expect(searchResult.filesToUpload.length).toEqual(10)

    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem5Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem1Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem2Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem3Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem4Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem5Path)).toEqual(
      true
    )

    expect(searchResult.rootDirectory).toEqual(root)
  })

  it('Wildcard search - Relative Path', async () => {
    const searchPath = path.join(
      '__tests__',
      '_temp',
      'search',
      '**/*[Ss]earch*'
    )
    const searchResult = await findFilesToUpload(searchPath)
    expect(searchResult.filesToUpload.length).toEqual(10)

    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem5Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem1Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem2Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem3Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem4Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem5Path)).toEqual(
      true
    )

    expect(searchResult.rootDirectory).toEqual(root)
  })

  it('Multi path search - root directory', async () => {
    const searchPath1 = path.join(root, 'folder-a')
    const searchPath2 = path.join(root, 'folder-d')

    const searchPaths = searchPath1 + '\n' + searchPath2
    const searchResult = await findFilesToUpload(searchPaths)

    expect(searchResult.rootDirectory).toEqual(root)
    expect(searchResult.filesToUpload.length).toEqual(7)
    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem1Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem2Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraFileInFolderCPath)).toEqual(
      true
    )
  })

  it('Multi path search - with exclude character', async () => {
    const searchPath1 = path.join(root, 'folder-a')
    const searchPath2 = path.join(root, 'folder-d')
    const searchPath3 = path.join(root, 'folder-a', 'folder-b', '**/extra*.txt')

    // negating the third search path
    const searchPaths = searchPath1 + '\n' + searchPath2 + '\n!' + searchPath3
    const searchResult = await findFilesToUpload(searchPaths)

    expect(searchResult.rootDirectory).toEqual(root)
    expect(searchResult.filesToUpload.length).toEqual(5)
    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem2Path)).toEqual(
      true
    )
  })

  it('Multi path search - non root directory', async () => {
    const searchPath1 = path.join(root, 'folder-h', 'folder-i')
    const searchPath2 = path.join(root, 'folder-h', 'folder-j', 'folder-k')
    const searchPath3 = amazingFileInFolderHPath

    const searchPaths = [searchPath1, searchPath2, searchPath3].join('\n')
    const searchResult = await findFilesToUpload(searchPaths)

    expect(searchResult.rootDirectory).toEqual(path.join(root, 'folder-h'))
|
||||||
|
expect(searchResult.filesToUpload.length).toEqual(4)
|
||||||
|
expect(
|
||||||
|
searchResult.filesToUpload.includes(amazingFileInFolderHPath)
|
||||||
|
).toEqual(true)
|
||||||
|
expect(searchResult.filesToUpload.includes(extraSearchItem4Path)).toEqual(
|
||||||
|
true
|
||||||
|
)
|
||||||
|
expect(searchResult.filesToUpload.includes(extraSearchItem5Path)).toEqual(
|
||||||
|
true
|
||||||
|
)
|
||||||
|
expect(searchResult.filesToUpload.includes(lonelyFilePath)).toEqual(true)
|
||||||
|
})
|
||||||
|
})
|
action.yml (23 changed lines)
@@ -4,10 +4,25 @@ author: 'GitHub'
 inputs:
   name:
     description: 'Artifact name'
-    required: true
+    default: 'artifact'
   path:
-    description: 'Directory containing files to upload'
+    description: 'A file, directory or wildcard pattern that describes what to upload'
     required: true
+  if-no-files-found:
+    description: >
+      The desired behavior if no files are found using the provided path.
+
+      Available Options:
+        warn: Output a warning but do not fail the action
+        error: Fail the action with an error message
+        ignore: Do not output any warnings or errors, the action does not fail
+    default: 'warn'
+  retention-days:
+    description: >
+      Duration after which artifact will expire in days. 0 means using default retention.
+
+      Minimum 1 day.
+      Maximum 90 days unless changed from the repository settings page.
 runs:
-  # Plugins live on the runner and are only available to a certain set of first party actions.
-  plugin: 'publish'
+  using: 'node16'
+  main: 'dist/index.js'
File diff suppressed because it is too large
@@ -0,0 +1,12 @@
module.exports = {
  clearMocks: true,
  moduleFileExtensions: ['js', 'ts'],
  roots: ['<rootDir>'],
  testEnvironment: 'node',
  testMatch: ['**/*.test.ts'],
  testRunner: 'jest-circus/runner',
  transform: {
    '^.+\\.ts$': 'ts-jest'
  },
  verbose: true
}
File diff suppressed because it is too large
@@ -0,0 +1,53 @@
{
  "name": "upload-artifact",
  "version": "3.0.0",
  "description": "Upload a build artifact that can be used by subsequent workflow steps",
  "main": "dist/index.js",
  "scripts": {
    "build": "tsc",
    "release": "ncc build src/upload-artifact.ts && git add -f dist/",
    "check-all": "concurrently \"npm:format-check\" \"npm:lint\" \"npm:test\" \"npm:build\"",
    "format": "prettier --write **/*.ts",
    "format-check": "prettier --check **/*.ts",
    "lint": "eslint **/*.ts",
    "test": "jest --testTimeout 10000"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/actions/upload-artifact.git"
  },
  "keywords": [
    "Actions",
    "GitHub",
    "Artifacts",
    "Upload"
  ],
  "author": "GitHub",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/actions/upload-artifact/issues"
  },
  "homepage": "https://github.com/actions/upload-artifact#readme",
  "dependencies": {
    "@actions/artifact": "^1.1.2",
    "@actions/core": "^1.10.0",
    "@actions/glob": "^0.3.0",
    "@actions/io": "^1.1.2"
  },
  "devDependencies": {
    "@types/jest": "^29.2.5",
    "@types/node": "^18.11.18",
    "@typescript-eslint/parser": "^5.48.0",
    "@vercel/ncc": "^0.36.0",
    "concurrently": "^7.6.0",
    "eslint": "^8.31.0",
    "eslint-plugin-github": "^4.6.0",
    "eslint-plugin-jest": "^27.2.0",
    "glob": "^8.0.3",
    "jest": "^29.3.1",
    "jest-circus": "^29.3.1",
    "prettier": "^2.8.1",
    "ts-jest": "^29.0.3",
    "typescript": "^4.9.4"
  }
}
@@ -0,0 +1,24 @@
/* eslint-disable no-unused-vars */
export enum Inputs {
  Name = 'name',
  Path = 'path',
  IfNoFilesFound = 'if-no-files-found',
  RetentionDays = 'retention-days'
}

export enum NoFileOptions {
  /**
   * Default. Output a warning but do not fail the action
   */
  warn = 'warn',

  /**
   * Fail the action with an error message
   */
  error = 'error',

  /**
   * Do not output any warnings or errors, the action does not fail
   */
  ignore = 'ignore'
}
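Because `NoFileOptions` above (presumably `src/constants.ts`) is a TypeScript string enum, indexing it with a raw input string yields either the matching member or `undefined`, which is what lets the input helper further down reject an unrecognized `if-no-files-found` value with a simple falsy check. A minimal sketch of that lookup behavior, with the relative import path assumed:

import {NoFileOptions} from './constants'

// Indexing a string enum with an arbitrary key yields the member value when
// the key exists and undefined otherwise.
function parseNoFileBehavior(raw: string): NoFileOptions | undefined {
  return NoFileOptions[raw as keyof typeof NoFileOptions]
}

console.log(parseNoFileBehavior('warn')) // 'warn'
console.log(parseNoFileBehavior('bogus')) // undefined -> treated as an unrecognized input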
@@ -0,0 +1,40 @@
import * as core from '@actions/core'
import {Inputs, NoFileOptions} from './constants'
import {UploadInputs} from './upload-inputs'

/**
 * Helper to get all the inputs for the action
 */
export function getInputs(): UploadInputs {
  const name = core.getInput(Inputs.Name)
  const path = core.getInput(Inputs.Path, {required: true})

  const ifNoFilesFound = core.getInput(Inputs.IfNoFilesFound)
  const noFileBehavior: NoFileOptions = NoFileOptions[ifNoFilesFound]

  if (!noFileBehavior) {
    core.setFailed(
      `Unrecognized ${
        Inputs.IfNoFilesFound
      } input. Provided: ${ifNoFilesFound}. Available options: ${Object.keys(
        NoFileOptions
      )}`
    )
  }

  const inputs = {
    artifactName: name,
    searchPath: path,
    ifNoFilesFound: noFileBehavior
  } as UploadInputs

  const retentionDaysStr = core.getInput(Inputs.RetentionDays)
  if (retentionDaysStr) {
    inputs.retentionDays = parseInt(retentionDaysStr)
    if (isNaN(inputs.retentionDays)) {
      core.setFailed('Invalid retention-days')
    }
  }

  return inputs
}
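This helper (presumably `src/input-helper.ts`) reads inputs only through `@actions/core`, which, as I understand it, resolves an input from an `INPUT_*` environment variable (uppercased, spaces replaced by underscores, hyphens kept). Under that assumption, a rough sketch of exercising `getInputs` outside a runner by seeding those variables; the values are illustrative only:

import {getInputs} from './input-helper'

// Assumed mapping from action inputs to environment variables set by the runner.
process.env['INPUT_NAME'] = 'my-artifact'
process.env['INPUT_PATH'] = 'dist/**'
process.env['INPUT_IF-NO-FILES-FOUND'] = 'error'
process.env['INPUT_RETENTION-DAYS'] = '5'

const inputs = getInputs()
console.log(inputs.artifactName, inputs.searchPath, inputs.ifNoFilesFound, inputs.retentionDays)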
@@ -0,0 +1,157 @@
import * as glob from '@actions/glob'
import * as path from 'path'
import {debug, info} from '@actions/core'
import {stat} from 'fs'
import {dirname} from 'path'
import {promisify} from 'util'
const stats = promisify(stat)

export interface SearchResult {
  filesToUpload: string[]
  rootDirectory: string
}

function getDefaultGlobOptions(): glob.GlobOptions {
  return {
    followSymbolicLinks: true,
    implicitDescendants: true,
    omitBrokenSymbolicLinks: true
  }
}

/**
 * If multiple paths are specified, the least common ancestor (LCA) of the search paths is used as
 * the delimiter to control the directory structure for the artifact. This function returns the LCA
 * when given an array of search paths
 *
 * Example 1: The patterns `/foo/` and `/bar/` return `/`
 *
 * Example 2: The patterns `~/foo/bar/*`, `~/foo/voo/two/*` and `~/foo/mo/` return `~/foo`
 */
function getMultiPathLCA(searchPaths: string[]): string {
  if (searchPaths.length < 2) {
    throw new Error('At least two search paths must be provided')
  }

  const commonPaths = new Array<string>()
  const splitPaths = new Array<string[]>()
  let smallestPathLength = Number.MAX_SAFE_INTEGER

  // split each of the search paths using the platform specific separator
  for (const searchPath of searchPaths) {
    debug(`Using search path ${searchPath}`)

    const splitSearchPath = path.normalize(searchPath).split(path.sep)

    // keep track of the smallest path length so that we don't accidentally later go out of bounds
    smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length)
    splitPaths.push(splitSearchPath)
  }

  // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
  if (searchPaths[0].startsWith(path.sep)) {
    commonPaths.push(path.sep)
  }

  let splitIndex = 0
  // function to check if the paths are the same at a specific index
  function isPathTheSame(): boolean {
    const compare = splitPaths[0][splitIndex]
    for (let i = 1; i < splitPaths.length; i++) {
      if (compare !== splitPaths[i][splitIndex]) {
        // a non-common index has been reached
        return false
      }
    }
    return true
  }

  // loop over all the search paths until there is a non-common ancestor or we go out of bounds
  while (splitIndex < smallestPathLength) {
    if (!isPathTheSame()) {
      break
    }
    // if all are the same, add to the end result & increment the index
    commonPaths.push(splitPaths[0][splitIndex])
    splitIndex++
  }
  return path.join(...commonPaths)
}

export async function findFilesToUpload(
  searchPath: string,
  globOptions?: glob.GlobOptions
): Promise<SearchResult> {
  const searchResults: string[] = []
  const globber = await glob.create(
    searchPath,
    globOptions || getDefaultGlobOptions()
  )
  const rawSearchResults: string[] = await globber.glob()

  /*
    Files are saved with case insensitivity. Uploading both a.txt and A.txt will cause one file to overwrite the other.
    Detect any files that could be overwritten for user awareness
  */
  const set = new Set<string>()

  /*
    Directories will be rejected if attempted to be uploaded. This includes just empty
    directories, so filter any directories out from the raw search results
  */
  for (const searchResult of rawSearchResults) {
    const fileStats = await stats(searchResult)
    // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
    if (!fileStats.isDirectory()) {
      debug(`File:${searchResult} was found using the provided searchPath`)
      searchResults.push(searchResult)

      // detect any files that would be overwritten because of case insensitivity
      if (set.has(searchResult.toLowerCase())) {
        info(
          `Uploads are case insensitive: ${searchResult} will be overwritten by another file with the same path`
        )
      } else {
        set.add(searchResult.toLowerCase())
      }
    } else {
      debug(
        `Removing ${searchResult} from rawSearchResults because it is a directory`
      )
    }
  }

  // Calculate the root directory for the artifact using the search paths that were utilized
  const searchPaths: string[] = globber.getSearchPaths()

  if (searchPaths.length > 1) {
    info(
      `Multiple search paths detected. Calculating the least common ancestor of all paths`
    )
    const lcaSearchPath = getMultiPathLCA(searchPaths)
    info(
      `The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`
    )

    return {
      filesToUpload: searchResults,
      rootDirectory: lcaSearchPath
    }
  }

  /*
    Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is
    not preserved and the root directory will be the single file's parent directory
  */
  if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) {
    return {
      filesToUpload: searchResults,
      rootDirectory: dirname(searchResults[0])
    }
  }

  return {
    filesToUpload: searchResults,
    rootDirectory: searchPaths[0]
  }
}
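For orientation, a small usage sketch of `findFilesToUpload` (presumably `src/search.ts`), passing the same newline-separated include/exclude pattern shape the tests above exercise; the concrete patterns here are made up for illustration:

import {findFilesToUpload} from './search'

async function demo(): Promise<void> {
  // Two include patterns plus a '!'-prefixed exclusion, joined by newlines,
  // matching how the action's multi-line `path` input is handed to the globber.
  const patterns = [
    'build/reports/**',
    'build/logs/**',
    '!build/logs/**/*.tmp'
  ].join('\n')

  const result = await findFilesToUpload(patterns)
  console.log(`Root directory: ${result.rootDirectory}`)
  console.log(`${result.filesToUpload.length} file(s) would be uploaded`)
}

demo().catch(err => console.error(err))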
@@ -0,0 +1,76 @@
import * as core from '@actions/core'
import {create, UploadOptions} from '@actions/artifact'
import {findFilesToUpload} from './search'
import {getInputs} from './input-helper'
import {NoFileOptions} from './constants'

async function run(): Promise<void> {
  try {
    const inputs = getInputs()
    const searchResult = await findFilesToUpload(inputs.searchPath)
    if (searchResult.filesToUpload.length === 0) {
      // No files were found, different use cases warrant different types of behavior if nothing is found
      switch (inputs.ifNoFilesFound) {
        case NoFileOptions.warn: {
          core.warning(
            `No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`
          )
          break
        }
        case NoFileOptions.error: {
          core.setFailed(
            `No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`
          )
          break
        }
        case NoFileOptions.ignore: {
          core.info(
            `No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`
          )
          break
        }
      }
    } else {
      const s = searchResult.filesToUpload.length === 1 ? '' : 's'
      core.info(
        `With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`
      )
      core.debug(`Root artifact directory is ${searchResult.rootDirectory}`)

      if (searchResult.filesToUpload.length > 10000) {
        core.warning(
          `There are over 10,000 files in this artifact; consider creating an archive before upload to improve upload performance.`
        )
      }

      const artifactClient = create()
      const options: UploadOptions = {
        continueOnError: false
      }
      if (inputs.retentionDays) {
        options.retentionDays = inputs.retentionDays
      }

      const uploadResponse = await artifactClient.uploadArtifact(
        inputs.artifactName,
        searchResult.filesToUpload,
        searchResult.rootDirectory,
        options
      )

      if (uploadResponse.failedItems.length > 0) {
        core.setFailed(
          `An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`
        )
      } else {
        core.info(
          `Artifact ${uploadResponse.artifactName} has been successfully uploaded!`
        )
      }
    }
  } catch (error) {
    core.setFailed((error as Error).message)
  }
}

run()
@@ -0,0 +1,23 @@
import {NoFileOptions} from './constants'

export interface UploadInputs {
  /**
   * The name of the artifact that will be uploaded
   */
  artifactName: string

  /**
   * The search path used to describe what to upload as part of the artifact
   */
  searchPath: string

  /**
   * The desired behavior if no files are found with the provided search path
   */
  ifNoFilesFound: NoFileOptions

  /**
   * Duration, in days, after which the artifact will expire
   */
  retentionDays: number
}
@@ -0,0 +1,17 @@
{
  "compilerOptions": {
    "target": "es6",
    "module": "commonjs",
    "outDir": "./lib",
    "rootDir": "./src",
    "strict": true,
    "noImplicitAny": false,
    "moduleResolution": "node",
    "allowSyntheticDefaultImports": true,
    "esModuleInterop": true,
    "declaration": false,
    "sourceMap": true,
    "lib": ["es6"]
  },
  "exclude": ["node_modules", "**/*.test.ts"]
}