Compare commits

...

18 Commits

Author SHA1 Message Date
c3ac5dd0ed Revert "Add support for Oracle JDK (#401)" (#421)
This reverts commit 6cdf39a6b6.

Co-authored-by: Brian Cristante <33549821+brcrista@users.noreply.github.com>
2022-12-05 17:45:21 +01:00
dcd29da2fa Fix typo in README.md (#419)
`diractory` -> `directory`
2022-12-05 10:25:33 +01:00
19eeec562b Update to latest actions/publish-action (#411) 2022-11-23 17:28:23 +01:00
bd7e5d28eb Update minimatch to 3.1.2 (#413) 2022-11-23 15:24:31 +01:00
6cdf39a6b6 Add support for Oracle JDK (#401) 2022-11-23 13:28:51 +01:00
7db6b4554c Eclipse Temurin instead of Adopt OpenJDK (#398) 2022-11-18 09:35:24 +01:00
bf2f02c4a7 Pass the token input through on GHES for Microsoft Build of OpenJDK (#395) 2022-11-18 09:33:59 +01:00
de1bb2b0c5 feat: support Gradle version catalog (#394) 2022-10-17 18:34:41 +02:00
2c53c1a588 Update actions/cache to 3.0.4 version (#392) 2022-10-17 12:12:57 +02:00
3617c43588 Default to runner architecture (#376) 2022-10-11 01:47:17 +02:00
a82e6d0020 Update README.md (#391) 2022-10-10 15:28:56 +02:00
fbb269240e Merge pull request #390 from rentziass/rentziass/update-actions-core
Update @actions/core to 1.10.0
2022-10-07 13:50:24 +02:00
dfcd06a4b9 Update @actions/core to 1.10.0 2022-10-06 12:11:13 +01:00
e150063ee4 Merge pull request #282 from Okeanos/maven-toolchains-support
Add Maven Toolchains Declaration (#276)
2022-09-28 15:22:23 +02:00
eb1418aa81 Add Maven Toolchains Declaration (#276)
* Add (optional) Maven Toolchains Declaration after JDK is installed
* Extract common/shared Maven constants

Resolves #276
2022-09-27 17:25:52 +02:00
499ae9c28b Merge pull request #387 from akv-platform/v-sdolin/issue-382-docs2
Add inputs documentation to README
2022-09-27 09:25:48 +02:00
cdc5f296eb Fix cross-refernces 2022-09-22 15:55:15 +02:00
b80173ab1e Add inputs documentation to README 2022-09-22 09:11:29 +02:00
33 changed files with 1592 additions and 372 deletions

View File

@ -22,7 +22,7 @@ jobs:
steps:
- name: Update the ${{ env.TAG_NAME }} tag
id: update-major-tag
uses: actions/publish-action@v0.1.0
uses: actions/publish-action@v0.2.1
with:
source-tag: ${{ env.TAG_NAME }}
slack-webhook: ${{ secrets.SLACK_WEBHOOK }}
slack-webhook: ${{ secrets.SLACK_WEBHOOK }}

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -12,6 +12,7 @@ The `setup-java` action provides the following functionality for GitHub Actions
- Caching dependencies managed by Apache Maven
- Caching dependencies managed by Gradle
- Caching dependencies managed by sbt
- [Maven Toolchains declaration](https://maven.apache.org/guides/mini/guide-using-toolchains.html) for specified JDK versions
This action allows you to work with Java and Scala projects.
@ -22,7 +23,40 @@ This action allows you to work with Java and Scala projects.
## Usage
Inputs `java-version` and `distribution` are mandatory. See the [Supported distributions](#supported-distributions) section for a list of available options. A minimal example is shown after the list below.
- `java-version`: _(required)_ The Java version to set up. Takes a whole or [semver](#supported-version-syntax) Java version.
- `distribution`: _(required)_ Java [distribution](#supported-distributions).
- `java-package`: The packaging variant of the chosen distribution. Possible values: `jdk`, `jre`, `jdk+fx`, `jre+fx`. Default value: `jdk`.
- `architecture`: The target architecture of the package. Possible values: `x86`, `x64`, `armv7`, `aarch64`, `ppc64le`. Default value: `x64`.
- `jdkFile`: If a use case requires a custom distribution, setup-java uses the compressed JDK from the location pointed to by this input and takes care of installation and caching on the VM.
- `check-latest`: Setting this option makes the action check for the latest available version that satisfies the version spec.
- `cache`: Quickly [set up caching](#caching-packages-dependencies) for dependencies managed through one of the predefined package managers. It can be one of "maven", "gradle" or "sbt".
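For illustration, a minimal workflow step wiring these inputs together might look like the following sketch (the distribution and version values are only examples):

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '17'
```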
#### Maven options
The action has a number of inputs to generate Maven's [settings.xml](https://maven.apache.org/settings.html) on the fly and pass the values to the Apache Maven GPG Plugin as well as Apache Maven Toolchains. See [advanced usage](docs/advanced-usage.md) for more. An illustrative example follows the list below.
- `overwrite-settings`: By default, the action overwrites the settings.xml file. To skip generating the file when it already exists, set this to `false`.
- `server-id`: ID of the distributionManagement repository in the pom.xml file. Default is `github`.
- `server-username`: Environment variable name for the username for authentication to the Apache Maven repository. Default is GITHUB_ACTOR.
- `server-password`: Environment variable name for the password or token for authentication to the Apache Maven repository. Default is GITHUB_TOKEN.
- `settings-path`: Maven-related setting to point to the directory where the settings.xml file will be written. Default is ~/.m2.
- `gpg-private-key`: GPG private key to import. Default is an empty string.
- `gpg-passphrase`: Environment variable name for the GPG private key passphrase. Default is GPG_PASSPHRASE.
- `mvn-toolchain-id`: Name of the Maven Toolchain ID if the default name of `${distribution}_${java-version}` is not wanted.
- `mvn-toolchain-vendor`: Name of the Maven Toolchain Vendor if the default name of `${distribution}` is not wanted.
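As an illustrative sketch of these Maven-related inputs (the secret name is an assumption; the username and password values are environment variable names, as described above):

```yaml
- uses: actions/setup-java@v3
  with:
    distribution: 'temurin'
    java-version: '17'
    server-id: github
    server-username: GITHUB_ACTOR
    server-password: GITHUB_TOKEN
    gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
    gpg-passphrase: GPG_PASSPHRASE
```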
### Basic Configuration
@ -74,7 +108,7 @@ Currently, the following distributions are supported:
### Caching packages dependencies
The action has built-in functionality for caching and restoring dependencies. It uses [actions/cache](https://github.com/actions/cache) under the hood for caching dependencies but requires fewer configuration settings. Supported package managers are gradle, maven and sbt. The format of the used cache key is `setup-java-${{ platform }}-${{ packageManager }}-${{ fileHash }}`, where the hash is based on the following files (an example step follows the list):
- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties`, `buildSrc/**/Versions.kt`, `buildSrc/**/Dependencies.kt`
- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties`, `buildSrc/**/Versions.kt`, `buildSrc/**/Dependencies.kt`, and `gradle/*.versions.toml`
- maven: `**/pom.xml`
- sbt: all sbt build definition files `**/*.sbt`, `**/project/build.properties`, `**/project/**.{scala,sbt}`
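For example, enabling the built-in Gradle cache is a single input (a sketch; the build command is an assumption):

```yaml
steps:
  - uses: actions/checkout@v3
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '17'
      cache: 'gradle'
  - run: ./gradlew build
```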
@ -134,7 +168,7 @@ steps:
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
with:
distribution: 'adopt'
distribution: 'temurin'
java-version: '17'
check-latest: true
- run: java HelloWorldApp.java
@ -174,7 +208,11 @@ All versions are added to the PATH. The last version will be used and available
15
```
### Using Maven Toolchains
In the example above, multiple JDKs are installed for the same job. After the last JDK is installed, the resulting Maven Toolchains declaration contains references to all three JDKs. By default, the values for `id`, `version`, and `vendor` of the individual Toolchain entries are the given input values for `distribution` and `java-version` (`id` being the combination `${distribution}_${java-version}`).
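As a sketch of such a multi-JDK setup (the distribution value and version list are illustrative), a single step installing several versions produces a toolchains.xml with one entry per JDK:

```yaml
steps:
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: |
        8
        11
        15
```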
### Advanced Configuration
- [Selecting a Java distribution](docs/advanced-usage.md#Selecting-a-Java-distribution)
- [Eclipse Temurin](docs/advanced-usage.md#Eclipse-Temurin)
- [Adopt](docs/advanced-usage.md#Adopt)
@ -190,6 +228,7 @@ All versions are added to the PATH. The last version will be used and available
- [Publishing using Apache Maven](docs/advanced-usage.md#Publishing-using-Apache-Maven)
- [Publishing using Gradle](docs/advanced-usage.md#Publishing-using-Gradle)
- [Hosted Tool Cache](docs/advanced-usage.md#Hosted-Tool-Cache)
- [Modifying Maven Toolchains](docs/advanced-usage.md#Modifying-Maven-Toolchains)
## License
@ -197,4 +236,4 @@ The scripts and documentation in this project are released under the [MIT Licens
## Contributions
Contributions are welcome! See [Contributor's Guide](docs/contributors.md)
Contributions are welcome! See [Contributor's Guide](docs/contributors.md)

View File

@ -5,9 +5,10 @@ import * as core from '@actions/core';
import os from 'os';
import * as auth from '../src/auth';
import { M2_DIR, MVN_SETTINGS_FILE } from '../src/constants';
const m2Dir = path.join(__dirname, auth.M2_DIR);
const settingsFile = path.join(m2Dir, auth.SETTINGS_FILE);
const m2Dir = path.join(__dirname, M2_DIR);
const settingsFile = path.join(m2Dir, MVN_SETTINGS_FILE);
describe('auth tests', () => {
let spyOSHomedir: jest.SpyInstance;
@ -38,7 +39,7 @@ describe('auth tests', () => {
const password = 'TOLKIEN';
const altHome = path.join(__dirname, 'runner', 'settings');
const altSettingsFile = path.join(altHome, auth.SETTINGS_FILE);
const altSettingsFile = path.join(altHome, MVN_SETTINGS_FILE);
await io.rmRF(altHome); // ensure it doesn't already exist
await auth.createAuthenticationSettings(id, username, password, altHome, true);

View File

@ -98,7 +98,7 @@ describe('dependency cache', () => {
await expect(restore('gradle')).rejects.toThrowError(
`No file in ${projectRoot(
workspace
)} matched to [**/*.gradle*,**/gradle-wrapper.properties,buildSrc/**/Versions.kt,buildSrc/**/Dependencies.kt], make sure you have checked out the target repository`
)} matched to [**/*.gradle*,**/gradle-wrapper.properties,buildSrc/**/Versions.kt,buildSrc/**/Dependencies.kt,gradle/*.versions.toml], make sure you have checked out the target repository`
);
});
it('downloads cache based on build.gradle', async () => {
@ -112,6 +112,15 @@ describe('dependency cache', () => {
it('downloads cache based on build.gradle.kts', async () => {
createFile(join(workspace, 'build.gradle.kts'));
await restore('gradle');
expect(spyCacheRestore).toBeCalled();
expect(spyWarning).not.toBeCalled();
expect(spyInfo).toBeCalledWith('gradle cache is not found');
});
it('downloads cache based on libs.versions.toml', async () => {
createDirectory(join(workspace, 'gradle'));
createFile(join(workspace, 'gradle', 'libs.versions.toml'));
await restore('gradle');
expect(spyCacheRestore).toBeCalled();
expect(spyWarning).not.toBeCalled();

View File

@ -3,6 +3,8 @@ import { HttpClient } from '@actions/http-client';
import { AdoptDistribution, AdoptImplementation } from '../../src/distributions/adopt/installer';
import { JavaInstallerOptions } from '../../src/distributions/base-models';
import os from 'os';
let manifestData = require('../data/adopt.json') as [];
describe('getAvailableVersions', () => {
@ -128,6 +130,35 @@ describe('getAvailableVersions', () => {
expect(distribution.toolcacheFolderName).toBe(expected);
}
);
it.each([
['amd64', 'x64'],
['arm64', 'aarch64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const installerOptions: JavaInstallerOptions = {
version: '17',
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
};
const expectedParameters = `os=mac&architecture=${distroArch}&image_type=jdk&release_type=ga&jvm_impl=hotspot&page_size=20&page=0`;
const distribution = new AdoptDistribution(installerOptions, AdoptImplementation.Hotspot);
const baseUrl = 'https://api.adoptopenjdk.net/v3/assets/version/%5B1.0,100.0%5D';
const expectedUrl = `${baseUrl}?project=jdk&vendor=adoptopenjdk&heap_size=normal&sort_method=DEFAULT&sort_order=DESC&${expectedParameters}`;
distribution['getPlatformOption'] = () => 'mac';
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(expectedUrl);
}
);
});
describe('findPackageForDownload', () => {

View File

@ -12,6 +12,8 @@ import {
JavaInstallerResults
} from '../../src/distributions/base-models';
import os from 'os';
class EmptyJavaBase extends JavaBase {
constructor(installerOptions: JavaInstallerOptions) {
super('Empty', installerOptions);
@ -192,6 +194,8 @@ describe('setupJava', () => {
spyCoreSetOutput = jest.spyOn(core, 'setOutput');
spyCoreSetOutput.mockImplementation(() => undefined);
jest.spyOn(os, 'arch').mockReturnValue('x86');
});
afterEach(() => {
@ -212,6 +216,10 @@ describe('setupJava', () => {
[
{ version: '11.0.8', architecture: 'x86', packageType: 'jdk', checkLatest: false },
{ version: installedJavaVersion, path: javaPath }
],
[
{ version: '11', architecture: '', packageType: 'jdk', checkLatest: false },
{ version: installedJavaVersion, path: javaPath }
]
])('should find java locally for %s', (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);
@ -237,6 +245,10 @@ describe('setupJava', () => {
[
{ version: '11', architecture: 'x64', packageType: 'jre', checkLatest: false },
{ path: path.join('toolcache', 'Java_Empty_jre', '11.0.9', 'x64'), version: '11.0.9' }
],
[
{ version: '11', architecture: '', packageType: 'jre', checkLatest: false },
{ path: path.join('toolcache', 'Java_Empty_jre', '11.0.9', 'x86'), version: '11.0.9' }
]
])('download java with configuration %s', async (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);
@ -245,7 +257,7 @@ describe('setupJava', () => {
expect(spyCoreAddPath).toHaveBeenCalled();
expect(spyCoreExportVariable).toHaveBeenCalled();
expect(spyCoreExportVariable).toHaveBeenCalledWith(
`JAVA_HOME_${input.version}_${input.architecture.toLocaleUpperCase()}`,
`JAVA_HOME_${input.version}_${(input.architecture || 'x86').toLocaleUpperCase()}`,
expected.path
);
expect(spyCoreSetOutput).toHaveBeenCalled();
@ -260,6 +272,10 @@ describe('setupJava', () => {
[
{ version: '11.0.9', architecture: 'x86', packageType: 'jdk', checkLatest: true },
{ version: '11.0.9', path: javaPathInstalled }
],
[
{ version: '11.0.9', architecture: '', packageType: 'jdk', checkLatest: true },
{ version: '11.0.9', path: javaPathInstalled }
]
])('should check the latest java version for %s and resolve locally', async (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);
@ -283,6 +299,10 @@ describe('setupJava', () => {
[
{ version: '11.0.x', architecture: 'x86', packageType: 'jdk', checkLatest: true },
{ version: actualJavaVersion, path: javaPathInstalled }
],
[
{ version: '11', architecture: '', packageType: 'jdk', checkLatest: true },
{ version: actualJavaVersion, path: javaPathInstalled }
]
])('should check the latest java version for %s and download', async (input, expected) => {
mockJavaBase = new EmptyJavaBase(input);

View File

@ -3,6 +3,8 @@ import { JavaInstallerOptions } from '../../src/distributions/base-models';
import { CorrettoDistribution } from '../../src/distributions/corretto/installer';
import * as util from '../../src/util';
import os from 'os';
import { isGeneratorFunction } from 'util/types';
const manifestData = require('../data/corretto.json') as [];
@ -142,6 +144,33 @@ describe('getAvailableVersions', () => {
"Could not find satisfied version for SemVer '4'"
);
});
it.each([
['arm64', 'aarch64'],
['amd64', 'x64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const version = '17';
const installerOptions: JavaInstallerOptions = {
version,
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
};
const distribution = new CorrettoDistribution(installerOptions);
mockPlatform(distribution, 'macos');
const expectedLink = `https://corretto.aws/downloads/resources/17.0.2.8.1/amazon-corretto-17.0.2.8.1-macosx-${distroArch}.tar.gz`;
const availableVersion = await distribution['findPackageForDownload'](version);
expect(availableVersion).not.toBeNull();
expect(availableVersion.url).toBe(expectedLink);
}
);
});
const mockPlatform = (distribution: CorrettoDistribution, platform: string) => {

View File

@ -1,6 +1,7 @@
import { LibericaDistributions } from '../../src/distributions/liberica/installer';
import { ArchitectureOptions, LibericaVersion } from '../../src/distributions/liberica/models';
import { HttpClient } from '@actions/http-client';
import os from 'os';
const manifestData = require('../data/liberica.json') as LibericaVersion[];
@ -61,6 +62,39 @@ describe('getAvailableVersions', () => {
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
});
type DistroArch = {
bitness: string;
arch: string;
};
it.each([
['amd64', { bitness: '64', arch: 'x86' }],
['arm64', { bitness: '64', arch: 'arm' }]
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: DistroArch) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const distribution = new LibericaDistributions({
version: '17',
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
});
const additionalParams =
'&installation-type=archive&fields=downloadUrl%2Cversion%2CfeatureVersion%2CinterimVersion%2C' +
'updateVersion%2CbuildVersion';
distribution['getPlatformOption'] = () => 'macos';
const buildUrl = `https://api.bell-sw.com/v1/liberica/releases?os=macos&bundle-type=jdk&bitness=${distroArch.bitness}&arch=${distroArch.arch}&build-type=all${additionalParams}`;
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
}
);
it('load available versions', async () => {
const distribution = new LibericaDistributions({
version: '11',

View File

@ -1,5 +1,5 @@
import { MicrosoftDistributions } from '../../src/distributions/microsoft/installer';
import * as tc from '@actions/tool-cache';
import os from 'os';
import data from '../../src/distributions/microsoft/microsoft-openjdk-versions.json';
import * as httpm from '@actions/http-client';
import * as core from '@actions/core';
@ -77,6 +77,30 @@ describe('findPackageForDownload', () => {
expect(result.url).toBe(url);
});
it.each([
['amd64', 'x64'],
['arm64', 'aarch64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
jest.spyOn(os, 'platform').mockReturnValue('linux');
const version = '17';
const distro = new MicrosoftDistributions({
version,
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
});
const result = await distro['findPackageForDownload'](version);
const expectedUrl = `https://aka.ms/download-jdk/microsoft-jdk-17.0.3-linux-${distroArch}.tar.gz`;
expect(result.url).toBe(expectedUrl);
}
);
it('should throw an error', async () => {
await expect(distribution['findPackageForDownload']('8')).rejects.toThrow(
/Could not find satisfied version for SemVer */

View File

@ -1,5 +1,5 @@
import { HttpClient } from '@actions/http-client';
import os from 'os';
import {
TemurinDistribution,
TemurinImplementation
@ -109,6 +109,35 @@ describe('getAvailableVersions', () => {
expect(distribution.toolcacheFolderName).toBe(expected);
}
);
it.each([
['amd64', 'x64'],
['arm64', 'aarch64']
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: string) => {
jest.spyOn(os, 'arch').mockReturnValue(distroArch);
const installerOptions: JavaInstallerOptions = {
version: '17',
architecture: '',
packageType: 'jdk',
checkLatest: false
};
const expectedParameters = `os=mac&architecture=${distroArch}&image_type=jdk&release_type=ga&jvm_impl=hotspot&page_size=20&page=0`;
const distribution = new TemurinDistribution(installerOptions, TemurinImplementation.Hotspot);
const baseUrl = 'https://api.adoptium.net/v3/assets/version/%5B1.0,100.0%5D';
const expectedUrl = `${baseUrl}?project=jdk&vendor=adoptium&heap_size=normal&sort_method=DEFAULT&sort_order=DESC&${expectedParameters}`;
distribution['getPlatformOption'] = () => 'mac';
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(expectedUrl);
}
);
});
describe('findPackageForDownload', () => {

View File

@ -3,6 +3,7 @@ import * as semver from 'semver';
import { ZuluDistribution } from '../../src/distributions/zulu/installer';
import { IZuluVersions } from '../../src/distributions/zulu/models';
import * as utils from '../../src/util';
import os from 'os';
const manifestData = require('../data/zulu-releases-default.json') as [];
@ -72,6 +73,34 @@ describe('getAvailableVersions', () => {
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
});
type DistroArch = {
bitness: string;
arch: string;
};
it.each([
['amd64', { bitness: '64', arch: 'x86' }],
['arm64', { bitness: '64', arch: 'arm' }]
])(
'defaults to os.arch(): %s mapped to distro arch: %s',
async (osArch: string, distroArch: DistroArch) => {
jest.spyOn(os, 'arch').mockReturnValue(osArch);
const distribution = new ZuluDistribution({
version: '17',
architecture: '', // to get default value
packageType: 'jdk',
checkLatest: false
});
distribution['getPlatformOption'] = () => 'macos';
const buildUrl = `https://api.azul.com/zulu/download/community/v1.0/bundles/?os=macos&ext=tar.gz&bundle_type=jdk&javafx=false&arch=${distroArch.arch}&hw_bitness=${distroArch.bitness}&release_status=ga`;
await distribution['getAvailableVersions']();
expect(spyHttpClient.mock.calls).toHaveLength(1);
expect(spyHttpClient.mock.calls[0][0]).toBe(buildUrl);
}
);
it('load available versions', async () => {
const distribution = new ZuluDistribution({
version: '11',

View File

@ -0,0 +1,292 @@
import * as fs from 'fs';
import os from 'os';
import * as path from 'path';
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as toolchains from '../src/toolchains';
import { M2_DIR, MVN_TOOLCHAINS_FILE } from '../src/constants';
const m2Dir = path.join(__dirname, M2_DIR);
const toolchainsFile = path.join(m2Dir, MVN_TOOLCHAINS_FILE);
describe('toolchains tests', () => {
let spyOSHomedir: jest.SpyInstance;
let spyInfo: jest.SpyInstance;
beforeEach(async () => {
await io.rmRF(m2Dir);
spyOSHomedir = jest.spyOn(os, 'homedir');
spyOSHomedir.mockReturnValue(__dirname);
spyInfo = jest.spyOn(core, 'info');
spyInfo.mockImplementation(() => null);
}, 300000);
afterAll(async () => {
try {
await io.rmRF(m2Dir);
} catch {
console.log('Failed to remove test directories');
}
jest.resetAllMocks();
jest.clearAllMocks();
jest.restoreAllMocks();
}, 100000);
it('creates toolchains.xml in alternate locations', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const altHome = path.join(__dirname, 'runner', 'toolchains');
const altToolchainsFile = path.join(altHome, MVN_TOOLCHAINS_FILE);
await io.rmRF(altHome); // ensure it doesn't already exist
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: altHome,
overwriteSettings: true
});
expect(fs.existsSync(m2Dir)).toBe(false);
expect(fs.existsSync(toolchainsFile)).toBe(false);
expect(fs.existsSync(altHome)).toBe(true);
expect(fs.existsSync(altToolchainsFile)).toBe(true);
expect(fs.readFileSync(altToolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
);
await io.rmRF(altHome);
}, 100000);
it('creates toolchains.xml with minimal configuration', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const result = `<?xml version="1.0"?>
<toolchains xmlns="https://maven.apache.org/TOOLCHAINS/1.1.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd">
<toolchain>
<type>jdk</type>
<provides>
<version>17</version>
<vendor>Eclipse Temurin</vendor>
<id>temurin_17</id>
</provides>
<configuration>
<jdkHome>/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: m2Dir,
overwriteSettings: true
});
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
);
expect(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
).toEqual(result);
}, 100000);
it('reuses existing toolchains.xml files', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const originalFile = `<toolchains>
<toolchain>
<type>jdk</type>
<provides>
<version>1.6</version>
<vendor>Sun</vendor>
<id>sun_1.6</id>
</provides>
<configuration>
<jdkHome>/opt/jdk/sun/1.6</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
const result = `<?xml version="1.0"?>
<toolchains>
<toolchain>
<type>jdk</type>
<provides>
<version>1.6</version>
<vendor>Sun</vendor>
<id>sun_1.6</id>
</provides>
<configuration>
<jdkHome>/opt/jdk/sun/1.6</jdkHome>
</configuration>
</toolchain>
<toolchain>
<type>jdk</type>
<provides>
<version>17</version>
<vendor>Eclipse Temurin</vendor>
<id>temurin_17</id>
</provides>
<configuration>
<jdkHome>/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
fs.mkdirSync(m2Dir, { recursive: true });
fs.writeFileSync(toolchainsFile, originalFile);
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: m2Dir,
overwriteSettings: true
});
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition(
originalFile,
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
);
expect(
toolchains.generateToolchainDefinition(
originalFile,
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
).toEqual(result);
}, 100000);
it('does not overwrite existing toolchains.xml files', async () => {
const jdkInfo = {
version: '17',
vendor: 'Eclipse Temurin',
id: 'temurin_17',
jdkHome: '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64'
};
const originalFile = `<toolchains>
<toolchain>
<type>jdk</type>
<provides>
<version>1.6</version>
<vendor>Sun</vendor>
<id>sun_1.6</id>
</provides>
<configuration>
<jdkHome>/opt/jdk/sun/1.6</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
fs.mkdirSync(m2Dir, { recursive: true });
fs.writeFileSync(toolchainsFile, originalFile);
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
await toolchains.createToolchainsSettings({
jdkInfo,
settingsDirectory: m2Dir,
overwriteSettings: false
});
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(originalFile);
}, 100000);
it('generates valid toolchains.xml with minimal configuration', () => {
const jdkInfo = {
version: 'JAVA_VERSION',
vendor: 'JAVA_VENDOR',
id: 'VENDOR_VERSION',
jdkHome: 'JAVA_HOME'
};
const expectedToolchains = `<?xml version="1.0"?>
<toolchains xmlns="https://maven.apache.org/TOOLCHAINS/1.1.0"
xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd">
<toolchain>
<type>jdk</type>
<provides>
<version>${jdkInfo.version}</version>
<vendor>${jdkInfo.vendor}</vendor>
<id>${jdkInfo.id}</id>
</provides>
<configuration>
<jdkHome>${jdkInfo.jdkHome}</jdkHome>
</configuration>
</toolchain>
</toolchains>`;
expect(
toolchains.generateToolchainDefinition(
'',
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
)
).toEqual(expectedToolchains);
}, 100000);
it('creates toolchains.xml with correct id when none is supplied', async () => {
const version = '17';
const distributionName = 'temurin';
const id = 'temurin_17';
const jdkHome = '/opt/hostedtoolcache/Java_Temurin-Hotspot_jdk/17.0.1-12/x64';
await toolchains.configureToolchains(version, distributionName, jdkHome, undefined);
expect(fs.existsSync(m2Dir)).toBe(true);
expect(fs.existsSync(toolchainsFile)).toBe(true);
expect(fs.readFileSync(toolchainsFile, 'utf-8')).toEqual(
toolchains.generateToolchainDefinition('', version, distributionName, id, jdkHome)
);
}, 100000);
});

View File

@ -14,9 +14,8 @@ inputs:
required: false
default: 'jdk'
architecture:
description: 'The architecture of the package'
description: "The architecture of the package (defaults to the action runner's architecture)"
required: false
default: 'x64'
jdkFile:
description: 'Path to where the compressed JDK is located'
required: false
@ -60,8 +59,14 @@ inputs:
description: 'Workaround to pass job status to post job step. This variable is not intended for manual setting'
default: ${{ job.status }}
token:
description: Used to pull java versions from setup-java. Since there is a default value, token is typically not supplied by the user.
default: ${{ github.token }}
description: The token used to authenticate when fetching version manifests hosted on github.com, such as for the Microsoft Build of OpenJDK. When running this action on github.com, the default value is sufficient. When running on GHES, you can pass a personal access token for github.com if you are experiencing rate limiting.
default: ${{ github.server_url == 'https://github.com' && github.token || '' }}
mvn-toolchain-id:
description: 'Name of Maven Toolchain ID if the default name of "${distribution}_${java-version}" is not wanted. See examples of supported syntax in Advanced Usage file'
required: false
mvn-toolchain-vendor:
description: 'Name of Maven Toolchain Vendor if the default name of "${distribution}" is not wanted. See examples of supported syntax in Advanced Usage file'
required: false
outputs:
distribution:
description: 'Distribution of Java that has been installed'

dist/cleanup/index.js vendored (357 lines changed)
View File

@ -525,7 +525,13 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837));
const utils = __importStar(__nccwpck_require__(1518));
const constants_1 = __nccwpck_require__(8840);
const requestUtils_1 = __nccwpck_require__(3981);
const abort_controller_1 = __nccwpck_require__(2557);
/**
* Pipes the body of a HTTP response to a stream
*
@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const fd = fs.openSync(archivePath, 'w');
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
const abortSignal = controller.signal;
while (!downloadProgress.isDone()) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
abortSignal,
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
});
fs.writeFileSync(fd, result);
}));
if (result === 'timeout') {
controller.abort();
throw new Error('Aborting cache download as the download time exceeded the timeout.');
}
else if (Buffer.isBuffer(result)) {
fs.writeFileSync(fd, result);
}
}
}
finally {
@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
});
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
let timeoutHandle;
const timeoutPromise = new Promise(resolve => {
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
});
return Promise.race([promise, timeoutPromise]).then(result => {
clearTimeout(timeoutHandle);
return result;
});
});
//# sourceMappingURL=downloadUtils.js.map
/***/ }),
@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147);
const path = __importStar(__nccwpck_require__(1017));
const utils = __importStar(__nccwpck_require__(1518));
const constants_1 = __nccwpck_require__(8840);
const IS_WINDOWS = process.platform === 'win32';
function getTarPath(args, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
switch (process.platform) {
@ -1091,26 +1118,43 @@ function getWorkingDirectory() {
var _a;
return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
}
// Common function for extractTar and listTar to get the compression method
function getCompressionProgram(compressionMethod) {
// -d: Decompress.
// unzstd is equivalent to 'zstd -d'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
default:
return ['-z'];
}
}
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
const args = [
...getCompressionProgram(compressionMethod),
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
});
}
exports.listTar = listTar;
function extractTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
yield io.mkdirP(workingDirectory);
// --d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
}
const args = [
...getCompressionProgram(),
...getCompressionProgram(compressionMethod),
'-xf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P',
@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
const workingDirectory = getWorkingDirectory();
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// zstdmt is equivalent to 'zstd -T0'
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -T0 --long=30'];
return [
'--use-compress-program',
IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -T0'];
return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
default:
return ['-z'];
}
@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
});
}
exports.createTar = createTar;
function listTar(archivePath, compressionMethod) {
return __awaiter(this, void 0, void 0, function* () {
// --d: Decompress.
// --long=#: Enables long distance matching with # bits.
// Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
}
const args = [
...getCompressionProgram(),
'-tf',
archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
'-P'
];
yield execTar(args, compressionMethod);
});
}
exports.listTar = listTar;
//# sourceMappingURL=tar.js.map
/***/ }),
@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000
timeoutInMs: 30000,
segmentTimeoutInMs: 3600000
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) {
if (typeof copy.timeoutInMs === 'number') {
result.timeoutInMs = copy.timeoutInMs;
}
if (typeof copy.segmentTimeoutInMs === 'number') {
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
}
}
const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
if (segmentDownloadTimeoutMins &&
!isNaN(Number(segmentDownloadTimeoutMins)) &&
isFinite(Number(segmentDownloadTimeoutMins))) {
result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
}
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
return result;
}
exports.getDownloadOptions = getDownloadOptions;
@ -4831,7 +4865,6 @@ const file_command_1 = __nccwpck_require__(717);
const utils_1 = __nccwpck_require__(5278);
const os = __importStar(__nccwpck_require__(2037));
const path = __importStar(__nccwpck_require__(1017));
const uuid_1 = __nccwpck_require__(8974);
const oidc_utils_1 = __nccwpck_require__(8041);
/**
* The code to exit an action
@ -4861,20 +4894,9 @@ function exportVariable(name, val) {
process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
// These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
if (name.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedVal.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
}
command_1.issueCommand('set-env', { name }, convertedVal);
}
exports.exportVariable = exportVariable;
/**
@ -4892,7 +4914,7 @@ exports.setSecret = setSecret;
function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) {
file_command_1.issueCommand('PATH', inputPath);
file_command_1.issueFileCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
@ -4932,7 +4954,10 @@ function getMultilineInput(name, options) {
const inputs = getInput(name, options)
.split('\n')
.filter(x => x !== '');
return inputs;
if (options && options.trimWhitespace === false) {
return inputs;
}
return inputs.map(input => input.trim());
}
exports.getMultilineInput = getMultilineInput;
/**
@ -4965,8 +4990,12 @@ exports.getBooleanInput = getBooleanInput;
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
const filePath = process.env['GITHUB_OUTPUT'] || '';
if (filePath) {
return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
}
process.stdout.write(os.EOL);
command_1.issueCommand('set-output', { name }, value);
command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
}
exports.setOutput = setOutput;
/**
@ -5095,7 +5124,11 @@ exports.group = group;
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value);
const filePath = process.env['GITHUB_STATE'] || '';
if (filePath) {
return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
}
command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
}
exports.saveState = saveState;
/**
@ -5161,13 +5194,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.issueCommand = void 0;
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__nccwpck_require__(7147));
const os = __importStar(__nccwpck_require__(2037));
const uuid_1 = __nccwpck_require__(8974);
const utils_1 = __nccwpck_require__(5278);
function issueCommand(command, message) {
function issueFileCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
@ -5179,7 +5213,22 @@ function issueCommand(command, message) {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
const convertedValue = utils_1.toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map
/***/ }),
@ -53282,10 +53331,10 @@ function populateMaps (extensions, types) {
module.exports = minimatch
minimatch.Minimatch = Minimatch
var path = { sep: '/' }
try {
path = __nccwpck_require__(1017)
} catch (er) {}
var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || {
sep: '/'
}
minimatch.sep = path.sep
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
var expand = __nccwpck_require__(3717)
@ -53337,43 +53386,64 @@ function filter (pattern, options) {
}
function ext (a, b) {
a = a || {}
b = b || {}
var t = {}
Object.keys(b).forEach(function (k) {
t[k] = b[k]
})
Object.keys(a).forEach(function (k) {
t[k] = a[k]
})
Object.keys(b).forEach(function (k) {
t[k] = b[k]
})
return t
}
minimatch.defaults = function (def) {
if (!def || !Object.keys(def).length) return minimatch
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
return minimatch
}
var orig = minimatch
var m = function minimatch (p, pattern, options) {
return orig.minimatch(p, pattern, ext(def, options))
return orig(p, pattern, ext(def, options))
}
m.Minimatch = function Minimatch (pattern, options) {
return new orig.Minimatch(pattern, ext(def, options))
}
m.Minimatch.defaults = function defaults (options) {
return orig.defaults(ext(def, options)).Minimatch
}
m.filter = function filter (pattern, options) {
return orig.filter(pattern, ext(def, options))
}
m.defaults = function defaults (options) {
return orig.defaults(ext(def, options))
}
m.makeRe = function makeRe (pattern, options) {
return orig.makeRe(pattern, ext(def, options))
}
m.braceExpand = function braceExpand (pattern, options) {
return orig.braceExpand(pattern, ext(def, options))
}
m.match = function (list, pattern, options) {
return orig.match(list, pattern, ext(def, options))
}
return m
}
Minimatch.defaults = function (def) {
if (!def || !Object.keys(def).length) return Minimatch
return minimatch.defaults(def).Minimatch
}
function minimatch (p, pattern, options) {
if (typeof pattern !== 'string') {
throw new TypeError('glob pattern string required')
}
assertValidPattern(pattern)
if (!options) options = {}
@ -53382,9 +53452,6 @@ function minimatch (p, pattern, options) {
return false
}
// "" only matches ""
if (pattern.trim() === '') return p === ''
return new Minimatch(pattern, options).match(p)
}
@ -53393,15 +53460,14 @@ function Minimatch (pattern, options) {
return new Minimatch(pattern, options)
}
if (typeof pattern !== 'string') {
throw new TypeError('glob pattern string required')
}
assertValidPattern(pattern)
if (!options) options = {}
pattern = pattern.trim()
// windows support: need to use /, not \
if (path.sep !== '/') {
if (!options.allowWindowsEscape && path.sep !== '/') {
pattern = pattern.split(path.sep).join('/')
}
@ -53412,6 +53478,7 @@ function Minimatch (pattern, options) {
this.negate = false
this.comment = false
this.empty = false
this.partial = !!options.partial
// make the set of regexps etc.
this.make()
@ -53421,9 +53488,6 @@ Minimatch.prototype.debug = function () {}
Minimatch.prototype.make = make
function make () {
// don't do it more than once.
if (this._made) return
var pattern = this.pattern
var options = this.options
@ -53443,7 +53507,7 @@ function make () {
// step 2: expand braces
var set = this.globSet = this.braceExpand()
if (options.debug) this.debug = console.error
if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
this.debug(this.pattern, set)
@ -53523,12 +53587,11 @@ function braceExpand (pattern, options) {
pattern = typeof pattern === 'undefined'
? this.pattern : pattern
if (typeof pattern === 'undefined') {
throw new TypeError('undefined pattern')
}
assertValidPattern(pattern)
if (options.nobrace ||
!pattern.match(/\{.*\}/)) {
// Thanks to Yeting Li <https://github.com/yetingli> for
// improving this regexp to avoid a ReDOS vulnerability.
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
// shortcut. no need to expand.
return [pattern]
}
@ -53536,6 +53599,17 @@ function braceExpand (pattern, options) {
return expand(pattern)
}
var MAX_PATTERN_LENGTH = 1024 * 64
var assertValidPattern = function (pattern) {
if (typeof pattern !== 'string') {
throw new TypeError('invalid pattern')
}
if (pattern.length > MAX_PATTERN_LENGTH) {
throw new TypeError('pattern is too long')
}
}
// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
@ -53550,14 +53624,17 @@ function braceExpand (pattern, options) {
Minimatch.prototype.parse = parse
var SUBPARSE = {}
function parse (pattern, isSub) {
if (pattern.length > 1024 * 64) {
throw new TypeError('pattern is too long')
}
assertValidPattern(pattern)
var options = this.options
// shortcuts
if (!options.noglobstar && pattern === '**') return GLOBSTAR
if (pattern === '**') {
if (!options.noglobstar)
return GLOBSTAR
else
pattern = '*'
}
if (pattern === '') return ''
var re = ''
@ -53613,10 +53690,12 @@ function parse (pattern, isSub) {
}
switch (c) {
case '/':
/* istanbul ignore next */
case '/': {
// completely not allowed, even escaped.
// Should already be path-split by now.
return false
}
case '\\':
clearStateChar()
@ -53735,25 +53814,23 @@ function parse (pattern, isSub) {
// handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]"
if (inClass) {
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern.substring(classStart + 1, i)
try {
RegExp('[' + cs + ']')
} catch (er) {
// not a valid class!
var sp = this.parse(cs, SUBPARSE)
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
hasMagic = hasMagic || sp[1]
inClass = false
continue
}
// split where the last [ was, make sure we don't have
// an invalid re. if so, re-walk the contents of the
// would-be class to re-translate any characters that
// were passed through as-is
// TODO: It would probably be faster to determine this
// without a try/catch and a new RegExp, but it's tricky
// to do safely. For now, this is safe and works.
var cs = pattern.substring(classStart + 1, i)
try {
RegExp('[' + cs + ']')
} catch (er) {
// not a valid class!
var sp = this.parse(cs, SUBPARSE)
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
hasMagic = hasMagic || sp[1]
inClass = false
continue
}
// finish up the class.
@ -53837,9 +53914,7 @@ function parse (pattern, isSub) {
// something that could conceivably capture a dot
var addPatternStart = false
switch (re.charAt(0)) {
case '.':
case '[':
case '(': addPatternStart = true
case '[': case '.': case '(': addPatternStart = true
}
// Hack to work around lack of negative lookbehind in JS
@ -53901,7 +53976,7 @@ function parse (pattern, isSub) {
var flags = options.nocase ? 'i' : ''
try {
var regExp = new RegExp('^' + re + '$', flags)
} catch (er) {
} catch (er) /* istanbul ignore next - should be impossible */ {
// If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line
@ -53959,7 +54034,7 @@ function makeRe () {
try {
this.regexp = new RegExp(re, flags)
} catch (ex) {
} catch (ex) /* istanbul ignore next - should be impossible */ {
this.regexp = false
}
return this.regexp
@ -53977,8 +54052,8 @@ minimatch.match = function (list, pattern, options) {
return list
}
Minimatch.prototype.match = match
function match (f, partial) {
Minimatch.prototype.match = function match (f, partial) {
if (typeof partial === 'undefined') partial = this.partial
this.debug('match', f, this.pattern)
// short-circuit in the case of busted things.
// comments, etc.
@ -54060,6 +54135,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// should be impossible.
// some invalid regexp stuff in the set.
/* istanbul ignore if */
if (p === false) return false
if (p === GLOBSTAR) {
@ -54133,6 +54209,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// no match was found.
// However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then
/* istanbul ignore if */
if (partial) {
// ran out of file
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
@ -54146,11 +54223,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// patterns with magic have been turned into regexps.
var hit
if (typeof p === 'string') {
if (options.nocase) {
hit = f.toLowerCase() === p.toLowerCase()
} else {
hit = f === p
}
hit = f === p
this.debug('string match', p, f, hit)
} else {
hit = f.match(p)
@ -54181,16 +54254,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// this is ok if we're doing the match as part of
// a glob fs traversal.
return partial
} else if (pi === pl) {
} else /* istanbul ignore else */ if (pi === pl) {
// ran out of pattern, still have file left.
// this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash.
// a/* should match a/b/
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
return emptyFileEnd
return (fi === fl - 1) && (file[fi] === '')
}
// should be unreachable.
/* istanbul ignore next */
throw new Error('wtf?')
}
@ -68175,7 +68248,8 @@ const supportedPackageManager = [
'**/*.gradle*',
'**/gradle-wrapper.properties',
'buildSrc/**/Versions.kt',
'buildSrc/**/Dependencies.kt'
'buildSrc/**/Dependencies.kt',
'gradle/*.versions.toml'
]
},
{
@ -68406,7 +68480,7 @@ else {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
exports.INPUT_MVN_TOOLCHAIN_VENDOR = exports.INPUT_MVN_TOOLCHAIN_ID = exports.MVN_TOOLCHAINS_FILE = exports.MVN_SETTINGS_FILE = exports.M2_DIR = exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = exports.INPUT_JOB_STATUS = exports.INPUT_CACHE = exports.INPUT_DEFAULT_GPG_PASSPHRASE = exports.INPUT_DEFAULT_GPG_PRIVATE_KEY = exports.INPUT_GPG_PASSPHRASE = exports.INPUT_GPG_PRIVATE_KEY = exports.INPUT_OVERWRITE_SETTINGS = exports.INPUT_SETTINGS_PATH = exports.INPUT_SERVER_PASSWORD = exports.INPUT_SERVER_USERNAME = exports.INPUT_SERVER_ID = exports.INPUT_CHECK_LATEST = exports.INPUT_JDK_FILE = exports.INPUT_DISTRIBUTION = exports.INPUT_JAVA_PACKAGE = exports.INPUT_ARCHITECTURE = exports.INPUT_JAVA_VERSION = exports.MACOS_JAVA_CONTENT_POSTFIX = void 0;
exports.MACOS_JAVA_CONTENT_POSTFIX = 'Contents/Home';
exports.INPUT_JAVA_VERSION = 'java-version';
exports.INPUT_ARCHITECTURE = 'architecture';
@ -68426,6 +68500,11 @@ exports.INPUT_DEFAULT_GPG_PASSPHRASE = 'GPG_PASSPHRASE';
exports.INPUT_CACHE = 'cache';
exports.INPUT_JOB_STATUS = 'job-status';
exports.STATE_GPG_PRIVATE_KEY_FINGERPRINT = 'gpg-private-key-fingerprint';
exports.M2_DIR = '.m2';
exports.MVN_SETTINGS_FILE = 'settings.xml';
exports.MVN_TOOLCHAINS_FILE = 'toolchains.xml';
exports.INPUT_MVN_TOOLCHAIN_ID = 'mvn-toolchain-id';
exports.INPUT_MVN_TOOLCHAIN_VENDOR = 'mvn-toolchain-vendor';
/***/ }),

dist/setup/index.js vendored (608 lines changed)

File diff suppressed because it is too large

View File

@ -14,6 +14,7 @@
- [Publishing using Apache Maven](#Publishing-using-Apache-Maven)
- [Publishing using Gradle](#Publishing-using-Gradle)
- [Hosted Tool Cache](#Hosted-Tool-Cache)
- [Modifying Maven Toolchains](#Modifying-Maven-Toolchains)
See [action.yml](../action.yml) for more details on task inputs.
@ -79,6 +80,22 @@ steps:
- run: java -cp java HelloWorldApp
```
### Using Microsoft distribution on GHES
`setup-java` comes pre-installed on the appliance with GHES if Actions is enabled. When dynamically downloading the Microsoft Build of OpenJDK distribution, `setup-java` makes a request to `actions/setup-java` to get available versions on github.com (outside of the appliance). These calls to `actions/setup-java` are made via unauthenticated requests, which are limited to [60 requests per hour per IP](https://docs.github.com/en/rest/overview/resources-in-the-rest-api#rate-limiting). If more requests are made within the time frame, you will start to see rate-limit errors during download that look like: `##[error]API rate limit exceeded for...`.
To get a higher rate limit, you can [generate a personal access token on github.com](https://github.com/settings/tokens/new) and pass it as the `token` input for the action:
```yaml
uses: actions/setup-java@v3
with:
token: ${{ secrets.GH_DOTCOM_TOKEN }}
distribution: 'microsoft'
java-version: '11'
```
If the runner is not able to access github.com, any Java versions requested during a workflow run must come from the runner's tool cache. See "[Setting up the tool cache on self-hosted runners without internet access](https://docs.github.com/en/enterprise-server@3.2/admin/github-actions/managing-access-to-actions-from-githubcom/setting-up-the-tool-cache-on-self-hosted-runners-without-internet-access)" for more information.
### Amazon Corretto
**NOTE:** Amazon Corretto only supports the major version specification.
@ -347,6 +364,103 @@ See the help docs on [Publishing a Package with Gradle](https://help.github.com/
## Hosted Tool Cache
GitHub Hosted Runners have a tool cache that comes with some Java versions pre-installed. This tool cache helps speed up runs and tool setup by not requiring any new downloads. There is an environment variable called `RUNNER_TOOL_CACHE` on each runner that describes the location of this tool cache, and this is where you can find the pre-installed versions of Java. `setup-java` works by taking a specific version of Java in this tool cache and adding it to PATH if the version, architecture and distribution match.
Currently, LTS versions of Adopt OpenJDK (`adopt`) are cached on the GitHub Hosted Runners.
Currently, LTS versions of Eclipse Temurin (`temurin`) are cached on the GitHub Hosted Runners.
The tool cache is updated on a weekly basis. For information regarding locally cached versions of Java on GitHub hosted runners, check out [GitHub Actions Virtual Environments](https://github.com/actions/virtual-environments).
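As a minimal sketch (the exact cached versions and folder names vary by runner image), a workflow can list what is already present in the runner tool cache before resolving a cached LTS version:
```yaml
steps:
  # Illustrative only: show what is already present in the runner tool cache
  - run: ls "$RUNNER_TOOL_CACHE"
  - uses: actions/setup-java@v3
    with:
      distribution: 'temurin'
      java-version: '17'
```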
## Modifying Maven Toolchains
The `setup-java` action generates a basic [Maven Toolchains declaration](https://maven.apache.org/guides/mini/guide-using-toolchains.html) for specified Java versions by either creating a minimal toolchains file or extending an existing declaration with the additional JDKs.
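For reference, a freshly created `toolchains.xml` produced by the generator in `src/toolchains.ts` (shown later in this diff) for Temurin 11 would look roughly like this; the `jdkHome` path is a placeholder:
```xml
<?xml version="1.0"?>
<toolchains xmlns="https://maven.apache.org/TOOLCHAINS/1.1.0"
            xmlns:xsi="https://www.w3.org/2001/XMLSchema-instance"
            xsi:schemaLocation="https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd">
  <toolchain>
    <type>jdk</type>
    <provides>
      <version>11</version>
      <vendor>temurin</vendor>
      <id>temurin_11</id>
    </provides>
    <configuration>
      <jdkHome>/path/to/temurin-11</jdkHome>
    </configuration>
  </toolchain>
</toolchains>
```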
### Installing Multiple JDKs With Toolchains
Subsequent calls to `setup-java` with distinct distribution and version parameters will continue to extend the toolchains declaration and make all specified Java versions available.
```yaml
steps:
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: |
8
11
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: 15
```
The result is a toolchains declaration with entries for JDKs 8, 11 and 15. You can even combine this with custom JDKs of arbitrary versions:
```yaml
- run: |
download_url="https://example.com/java/jdk/6u45-b06/jdk-6u45-linux-x64.tar.gz"
wget -O $RUNNER_TEMP/java_package.tar.gz $download_url
- uses: actions/setup-java@v3
with:
distribution: 'jdkfile'
jdkFile: ${{ runner.temp }}/java_package.tar.gz
java-version: '1.6'
architecture: x64
```
This will generate a Toolchains entry with the following values: `version: 1.6`, `vendor: jdkfile`, `id: jdkfile_1.6`.
### Modifying The Toolchain Vendor For JDKs
Each JDK provider will receive a default `vendor` taken from the `distribution` input value, but this can be overridden with the `mvn-toolchain-vendor` parameter as follows.
```yaml
- run: |
download_url="https://example.com/java/jdk/6u45-b06/jdk-6u45-linux-x64.tar.gz"
wget -O $RUNNER_TEMP/java_package.tar.gz $download_url
- uses: actions/setup-java@v3
with:
distribution: 'jdkfile'
jdkFile: ${{ runner.temp }}/java_package.tar.gz
java-version: '1.6'
architecture: x64
mvn-toolchain-vendor: 'Oracle'
```
This will generate a Toolchains entry with the following values: `version: 1.6`, `vendor: Oracle`, `id: Oracle_1.6`.
If you install multiple versions of Java at once via the multi-line `java-version` input, `mvn-toolchain-vendor` still accepts only one value; that value is applied to all installed JDKs, which is the expected behaviour when installing multiple versions of the same `distribution`.
```yaml
steps:
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: |
8
11
mvn-toolchain-vendor: Eclipse Temurin
```
### Modifying The Toolchain ID For JDKs
Each JDK provider will receive a default `id` based on the combination of `distribution` and `java-version` in the format `distribution_java-version` (e.g. `temurin_11`), but this can be overridden with the `mvn-toolchain-id` parameter as follows.
```yaml
steps:
- uses: actions/checkout@v3
- uses: actions/setup-java@v3
with:
distribution: 'temurin'
java-version: '11'
mvn-toolchain-id: 'some_other_id'
- run: java -cp java HelloWorldApp
```
If you install multiple versions of Java at once, you can use the same multi-line syntax as for `java-version`. Please note that you have to declare an ID for every Java version that will be installed; otherwise the `mvn-toolchain-id` input is skipped entirely due to mapping ambiguities.
```yaml
steps:
- uses: actions/setup-java@v3
with:
distribution: '<distribution>'
java-version: |
8
11
mvn-toolchain-id: |
something_else
something_other
```

package-lock.json (generated, 40 changed lines)

@ -9,8 +9,8 @@
"version": "3.4.1",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.0.0",
"@actions/core": "^1.9.0",
"@actions/cache": "^3.0.4",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.4",
"@actions/glob": "^0.2.0",
"@actions/http-client": "^1.0.11",
@ -32,9 +32,9 @@
}
},
"node_modules/@actions/cache": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
"integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@ -73,9 +73,9 @@
}
},
"node_modules/@actions/core": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz",
"integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==",
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
"dependencies": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
@ -3674,9 +3674,9 @@
}
},
"node_modules/minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
@ -4790,9 +4790,9 @@
},
"dependencies": {
"@actions/cache": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
"integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@ -4830,9 +4830,9 @@
}
},
"@actions/core": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.9.1.tgz",
"integrity": "sha512-5ad+U2YGrmmiw6du20AQW5XuWo7UKN2052FjSV7MX+Wfjf8sCqcsZe62NfgHys4QI4/Y+vQvLKYL8jWtA1ZBTA==",
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
"requires": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
@ -7657,9 +7657,9 @@
"dev": true
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"requires": {
"brace-expansion": "^1.1.7"
}


@ -24,8 +24,8 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.0.0",
"@actions/core": "^1.9.0",
"@actions/cache": "^3.0.4",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.0.4",
"@actions/glob": "^0.2.0",
"@actions/http-client": "^1.0.11",


@ -10,15 +10,12 @@ import * as constants from './constants';
import * as gpg from './gpg';
import { getBooleanInput } from './util';
export const M2_DIR = '.m2';
export const SETTINGS_FILE = 'settings.xml';
export async function configureAuthentication() {
const id = core.getInput(constants.INPUT_SERVER_ID);
const username = core.getInput(constants.INPUT_SERVER_USERNAME);
const password = core.getInput(constants.INPUT_SERVER_PASSWORD);
const settingsDirectory =
core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), M2_DIR);
core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), constants.M2_DIR);
const overwriteSettings = getBooleanInput(constants.INPUT_OVERWRITE_SETTINGS, true);
const gpgPrivateKey =
core.getInput(constants.INPUT_GPG_PRIVATE_KEY) || constants.INPUT_DEFAULT_GPG_PRIVATE_KEY;
@ -54,7 +51,7 @@ export async function createAuthenticationSettings(
overwriteSettings: boolean,
gpgPassphrase: string | undefined = undefined
) {
core.info(`Creating ${SETTINGS_FILE} with server-id: ${id}`);
core.info(`Creating ${constants.MVN_SETTINGS_FILE} with server-id: ${id}`);
// when an alternate m2 location is specified use only that location (no .m2 directory)
// otherwise use the home/.m2/ path
await io.mkdirP(settingsDirectory);
@ -106,7 +103,7 @@ export function generate(
}
async function write(directory: string, settings: string, overwriteSettings: boolean) {
const location = path.join(directory, SETTINGS_FILE);
const location = path.join(directory, constants.MVN_SETTINGS_FILE);
const settingsExists = fs.existsSync(location);
if (settingsExists && overwriteSettings) {
core.info(`Overwriting existing file ${location}`);


@ -35,7 +35,8 @@ const supportedPackageManager: PackageManager[] = [
'**/*.gradle*',
'**/gradle-wrapper.properties',
'buildSrc/**/Versions.kt',
'buildSrc/**/Dependencies.kt'
'buildSrc/**/Dependencies.kt',
'gradle/*.versions.toml'
]
},
{


@ -20,3 +20,9 @@ export const INPUT_CACHE = 'cache';
export const INPUT_JOB_STATUS = 'job-status';
export const STATE_GPG_PRIVATE_KEY_FINGERPRINT = 'gpg-private-key-fingerprint';
export const M2_DIR = '.m2';
export const MVN_SETTINGS_FILE = 'settings.xml';
export const MVN_TOOLCHAINS_FILE = 'toolchains.xml';
export const INPUT_MVN_TOOLCHAIN_ID = 'mvn-toolchain-id';
export const INPUT_MVN_TOOLCHAIN_VENDOR = 'mvn-toolchain-vendor';


@ -88,7 +88,7 @@ export class AdoptDistribution extends JavaBase {
private async getAvailableVersions(): Promise<IAdoptAvailableVersions[]> {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
const versionRange = encodeURI('[1.0,100.0]'); // retrieve all available versions
const releaseType = this.stable ? 'ga' : 'ea';


@ -7,6 +7,7 @@ import * as httpm from '@actions/http-client';
import { getToolcachePath, isVersionSatisfies } from '../util';
import { JavaDownloadRelease, JavaInstallerOptions, JavaInstallerResults } from './base-models';
import { MACOS_JAVA_CONTENT_POSTFIX } from '../constants';
import os from 'os';
export abstract class JavaBase {
protected http: httpm.HttpClient;
@ -25,7 +26,7 @@ export abstract class JavaBase {
({ version: this.version, stable: this.stable } = this.normalizeVersion(
installerOptions.version
));
this.architecture = installerOptions.architecture;
this.architecture = installerOptions.architecture || os.arch();
this.packageType = installerOptions.packageType;
this.checkLatest = installerOptions.checkLatest;
}
@ -150,4 +151,24 @@ export abstract class JavaBase {
core.setOutput('version', version);
core.exportVariable(`JAVA_HOME_${majorVersion}_${this.architecture.toUpperCase()}`, toolPath);
}
protected distributionArchitecture(): string {
// default mappings of config architectures to distribution architectures
// override if a distribution uses any different names; see liberica for an example
// node's os.arch() - which this defaults to - can return any of:
// 'arm', 'arm64', 'ia32', 'mips', 'mipsel', 'ppc', 'ppc64', 's390', 's390x', and 'x64'
// so we need to map these to java distribution architectures
// 'amd64' is included here too b/c it's a common alias for 'x64' people might use explicitly
switch (this.architecture) {
case 'amd64':
return 'x64';
case 'ia32':
return 'x86';
case 'arm64':
return 'aarch64';
default:
return this.architecture;
}
}
}


@ -68,7 +68,7 @@ export class CorrettoDistribution extends JavaBase {
private async getAvailableVersions(): Promise<ICorrettoAvailableVersions[]> {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
if (core.isDebug()) {


@ -10,7 +10,7 @@ import path from 'path';
const supportedPlatform = `'linux', 'linux-musl', 'macos', 'solaris', 'windows'`;
const supportedArchitecture = `'x86', 'x64', 'armv7', 'aarch64', 'ppc64le'`;
const supportedArchitectures = `'x86', 'x64', 'armv7', 'aarch64', 'ppc64le'`;
export class LibericaDistributions extends JavaBase {
constructor(installerOptions: JavaInstallerOptions) {
@ -112,7 +112,8 @@ export class LibericaDistributions extends JavaBase {
}
private getArchitectureOptions(): ArchitectureOptions {
switch (this.architecture) {
const arch = this.distributionArchitecture();
switch (arch) {
case 'x86':
return { bitness: '32', arch: 'x86' };
case 'x64':
@ -125,7 +126,7 @@ export class LibericaDistributions extends JavaBase {
return { bitness: '64', arch: 'ppc' };
default:
throw new Error(
`Architecture '${this.architecture}' is not supported. Supported architectures: ${supportedArchitecture}`
`Architecture '${this.architecture}' is not supported. Supported architectures: ${supportedArchitectures}`
);
}
}
@ -156,4 +157,14 @@ export class LibericaDistributions extends JavaBase {
}
return mainVersion;
}
protected distributionArchitecture(): string {
let arch = super.distributionArchitecture();
switch (arch) {
case 'arm':
return 'armv7';
default:
return arch;
}
}
}


@ -37,7 +37,8 @@ export class MicrosoftDistributions extends JavaBase {
}
protected async findPackageForDownload(range: string): Promise<JavaDownloadRelease> {
if (this.architecture !== 'x64' && this.architecture !== 'aarch64') {
const arch = this.distributionArchitecture();
if (arch !== 'x64' && arch !== 'aarch64') {
throw new Error(`Unsupported architecture: ${this.architecture}`);
}
@ -55,7 +56,7 @@ export class MicrosoftDistributions extends JavaBase {
throw new Error('Could not load manifest for Microsoft Build of OpenJDK');
}
const foundRelease = await tc.findFromManifest(range, true, manifest, this.architecture);
const foundRelease = await tc.findFromManifest(range, true, manifest, arch);
if (!foundRelease) {
throw new Error(
@ -72,6 +73,7 @@ export class MicrosoftDistributions extends JavaBase {
// TODO get these dynamically!
// We will need Microsoft to add an endpoint where we can query for versions.
const token = core.getInput('token');
const auth = !token ? undefined : `token ${token}`;
const owner = 'actions';
const repository = 'setup-java';
const branch = 'main';
@ -81,7 +83,7 @@ export class MicrosoftDistributions extends JavaBase {
const fileUrl = `https://api.github.com/repos/${owner}/${repository}/contents/${filePath}?ref=${branch}`;
const headers: OutgoingHttpHeaders = {
authorization: token,
authorization: auth,
accept: 'application/vnd.github.VERSION.raw'
};


@ -86,7 +86,7 @@ export class TemurinDistribution extends JavaBase {
private async getAvailableVersions(): Promise<ITemurinAvailableVersions[]> {
const platform = this.getPlatformOption();
const arch = this.architecture;
const arch = this.distributionArchitecture();
const imageType = this.packageType;
const versionRange = encodeURI('[1.0,100.0]'); // retrieve all available versions
const releaseType = this.stable ? 'ga' : 'ea';


@ -131,14 +131,17 @@ export class ZuluDistribution extends JavaBase {
hw_bitness: string;
abi: string;
} {
if (this.architecture == 'x64') {
return { arch: 'x86', hw_bitness: '64', abi: '' };
} else if (this.architecture == 'x86') {
return { arch: 'x86', hw_bitness: '32', abi: '' };
} else if (this.architecture == 'arm64') {
return { arch: 'arm', hw_bitness: '64', abi: '' };
} else {
return { arch: this.architecture, hw_bitness: '', abi: '' };
const arch = this.distributionArchitecture();
switch (arch) {
case 'x64':
return { arch: 'x86', hw_bitness: '64', abi: '' };
case 'x86':
return { arch: 'x86', hw_bitness: '32', abi: '' };
case 'aarch64':
case 'arm64':
return { arch: 'arm', hw_bitness: '64', abi: '' };
default:
return { arch: arch, hw_bitness: '', abi: '' };
}
}


@ -1,6 +1,7 @@
import * as core from '@actions/core';
import * as auth from './auth';
import { getBooleanInput, isCacheFeatureAvailable } from './util';
import * as toolchains from './toolchains';
import * as constants from './constants';
import { restore } from './cache';
import * as path from 'path';
@ -16,9 +17,14 @@ async function run() {
const jdkFile = core.getInput(constants.INPUT_JDK_FILE);
const cache = core.getInput(constants.INPUT_CACHE);
const checkLatest = getBooleanInput(constants.INPUT_CHECK_LATEST, false);
let toolchainIds = core.getMultilineInput(constants.INPUT_MVN_TOOLCHAIN_ID);
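  // if the number of toolchain ids does not match the number of requested versions,
  // ignore the input and fall back to the default ids to avoid ambiguous mappings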
if (versions.length !== toolchainIds.length) {
toolchainIds = [];
}
core.startGroup('Installed distributions');
for (const version of versions) {
for (const [index, version] of versions.entries()) {
const installerOptions: JavaInstallerOptions = {
architecture,
packageType,
@ -32,6 +38,12 @@ async function run() {
}
const result = await distribution.setupJava();
await toolchains.configureToolchains(
version,
distributionName,
result.path,
toolchainIds[index]
);
core.info('');
core.info('Java configuration:');

src/toolchains.ts (new file, 158 lines)

@ -0,0 +1,158 @@
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as core from '@actions/core';
import * as io from '@actions/io';
import * as constants from './constants';
import { getBooleanInput } from './util';
import { create as xmlCreate } from 'xmlbuilder2';
interface JdkInfo {
version: string;
vendor: string;
id: string;
jdkHome: string;
}
export async function configureToolchains(
version: string,
distributionName: string,
jdkHome: string,
toolchainId?: string
) {
const vendor = core.getInput(constants.INPUT_MVN_TOOLCHAIN_VENDOR) || distributionName;
const id = toolchainId || `${vendor}_${version}`;
const settingsDirectory =
core.getInput(constants.INPUT_SETTINGS_PATH) || path.join(os.homedir(), constants.M2_DIR);
const overwriteSettings = getBooleanInput(constants.INPUT_OVERWRITE_SETTINGS, true);
await createToolchainsSettings({
jdkInfo: {
version,
vendor,
id,
jdkHome
},
settingsDirectory,
overwriteSettings
});
}
export async function createToolchainsSettings({
jdkInfo,
settingsDirectory,
overwriteSettings
}: {
jdkInfo: JdkInfo;
settingsDirectory: string;
overwriteSettings: boolean;
}) {
core.info(
`Creating ${constants.MVN_TOOLCHAINS_FILE} for JDK version ${jdkInfo.version} from ${jdkInfo.vendor}`
);
// when an alternate m2 location is specified use only that location (no .m2 directory)
// otherwise use the home/.m2/ path
await io.mkdirP(settingsDirectory);
const originalToolchains = await readExistingToolchainsFile(settingsDirectory);
const updatedToolchains = generateToolchainDefinition(
originalToolchains,
jdkInfo.version,
jdkInfo.vendor,
jdkInfo.id,
jdkInfo.jdkHome
);
await writeToolchainsFileToDisk(settingsDirectory, updatedToolchains, overwriteSettings);
}
// only exported for testing purposes
export function generateToolchainDefinition(
original: string,
version: string,
vendor: string,
id: string,
jdkHome: string
) {
let xmlObj;
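  // if an existing toolchains.xml was read, append a new <toolchain> entry to its root;
  // otherwise create a fresh declaration with the Maven Toolchains 1.1.0 namespace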
if (original?.length) {
xmlObj = xmlCreate(original)
.root()
.ele({
toolchain: {
type: 'jdk',
provides: {
version: `${version}`,
vendor: `${vendor}`,
id: `${id}`
},
configuration: {
jdkHome: `${jdkHome}`
}
}
});
} else
xmlObj = xmlCreate({
toolchains: {
'@xmlns': 'https://maven.apache.org/TOOLCHAINS/1.1.0',
'@xmlns:xsi': 'https://www.w3.org/2001/XMLSchema-instance',
'@xsi:schemaLocation':
'https://maven.apache.org/TOOLCHAINS/1.1.0 https://maven.apache.org/xsd/toolchains-1.1.0.xsd',
toolchain: [
{
type: 'jdk',
provides: {
version: `${version}`,
vendor: `${vendor}`,
id: `${id}`
},
configuration: {
jdkHome: `${jdkHome}`
}
}
]
}
});
return xmlObj.end({
format: 'xml',
wellFormed: false,
headless: false,
prettyPrint: true,
width: 80
});
}
async function readExistingToolchainsFile(directory: string) {
const location = path.join(directory, constants.MVN_TOOLCHAINS_FILE);
if (fs.existsSync(location)) {
return fs.readFileSync(location, {
encoding: 'utf-8',
flag: 'r'
});
}
return '';
}
async function writeToolchainsFileToDisk(
directory: string,
settings: string,
overwriteSettings: boolean
) {
const location = path.join(directory, constants.MVN_TOOLCHAINS_FILE);
const settingsExists = fs.existsSync(location);
if (settingsExists && overwriteSettings) {
core.info(`Overwriting existing file ${location}`);
} else if (!settingsExists) {
core.info(`Writing to ${location}`);
} else {
core.info(
`Skipping generation of ${location} because file already exists and overwriting is not enabled`
);
return;
}
return fs.writeFileSync(location, settings, {
encoding: 'utf-8',
flag: 'w'
});
}