Adds input for upload chunk size

Dave Hadka 2020-10-02 09:59:55 -05:00
parent d606e039ae
commit a6f1f4b32e
10 changed files with 84 additions and 11 deletions
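
In short, the commit introduces an `upload-chunk-size` input, a `getInputAsInt` helper that parses it, and a third argument to `cache.saveCache` that carries it as `uploadChunkSize`. A minimal sketch of that wiring, using the names that appear in the diff below (the standalone `save` wrapper is illustrative, not part of the commit):

import * as cache from "@actions/cache";
import * as core from "@actions/core";

// Mirrors the new helper in src/utils/actionUtils.ts: unset, non-numeric,
// or negative values come back as undefined rather than a chunk size.
function getInputAsInt(
    name: string,
    options?: core.InputOptions
): number | undefined {
    const value = parseInt(core.getInput(name, options));
    if (isNaN(value) || value < 0) {
        return undefined;
    }
    return value;
}

// Illustrative wrapper: an undefined uploadChunkSize leaves the cache
// toolkit's default chunk size in effect.
async function save(paths: string[], key: string): Promise<number> {
    return cache.saveCache(paths, key, {
        uploadChunkSize: getInputAsInt("upload-chunk-size")
    });
}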

__tests__/actionUtils.test.ts

@@ -15,6 +15,7 @@ beforeAll(() => {
 afterEach(() => {
     delete process.env[Events.Key];
     delete process.env[RefKey];
+    testUtils.clearInputs();
 });

 test("isGhes returns true if server url is not github.com", () => {
@@ -212,3 +213,17 @@ test("getInputAsArray handles empty lines correctly", () => {
     testUtils.setInput("foo", "\n\nbar\n\nbaz\n\n");
     expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
 });
+
+test("getInputAsInt returns undefined if input not set", () => {
+    expect(actionUtils.getInputAsInt("foo")).toBeUndefined();
+});
+
+test("getInputAsInt returns value if input is valid", () => {
+    testUtils.setInput("foo", "8");
+    expect(actionUtils.getInputAsInt("foo")).toBe(8);
+});
+
+test("getInputAsInt returns undefined if input is invalid or NaN", () => {
+    testUtils.setInput("foo", "bar");
+    expect(actionUtils.getInputAsInt("foo")).toBeUndefined();
+});

__tests__/save.test.ts

@@ -27,6 +27,14 @@ beforeAll(() => {
         }
     );

+    jest.spyOn(actionUtils, "getInputAsInt").mockImplementation(
+        (name, options) => {
+            return jest
+                .requireActual("../src/utils/actionUtils")
+                .getInputAsInt(name, options);
+        }
+    );
+
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             return jest
@@ -193,7 +201,11 @@ test("save with large cache outputs warning", async () => {
     await run();

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
+    expect(saveCacheMock).toHaveBeenCalledWith(
+        [inputPath],
+        primaryKey,
+        expect.anything()
+    );

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
@@ -236,7 +248,11 @@ test("save with reserve cache failure outputs warning", async () => {
     await run();

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
+    expect(saveCacheMock).toHaveBeenCalledWith(
+        [inputPath],
+        primaryKey,
+        expect.anything()
+    );

     expect(infoMock).toHaveBeenCalledWith(
         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@@ -274,7 +290,11 @@ test("save with server error outputs warning", async () => {
     await run();

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
+    expect(saveCacheMock).toHaveBeenCalledWith(
+        [inputPath],
+        primaryKey,
+        expect.anything()
+    );

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
@@ -300,6 +320,7 @@ test("save with valid inputs uploads a cache", async () => {

     const inputPath = "node_modules";
     testUtils.setInput(Inputs.Path, inputPath);
+    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

     const cacheId = 4;
     const saveCacheMock = jest
@@ -311,7 +332,9 @@ test("save with valid inputs uploads a cache", async () => {
     await run();

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
+    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
+        uploadChunkSize: 4000000
+    });

     expect(failedMock).toHaveBeenCalledTimes(0);
 });
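
The warning-path tests above only assert that some options object is now passed, hence expect.anything(); the final test pins the exact value. A quick jest sketch of that distinction, with made-up values and not part of the commit:

test("expect.anything() vs. an exact options object (illustration)", () => {
    const saveCache = jest.fn();
    saveCache(["node_modules"], "key", { uploadChunkSize: 4000000 });

    // Passes for any non-null, non-undefined third argument.
    expect(saveCache).toHaveBeenCalledWith(
        ["node_modules"],
        "key",
        expect.anything()
    );

    // Pins the exact chunk size, as the happy-path test above does.
    expect(saveCache).toHaveBeenCalledWith(["node_modules"], "key", {
        uploadChunkSize: 4000000
    });
});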

dist/restore/index.js (vendored, 11 changed lines)

@@ -31296,7 +31296,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(694);
 function isGhes() {
@@ -31353,6 +31353,14 @@ function getInputAsArray(name, options) {
         .filter(x => x !== "");
 }
 exports.getInputAsArray = getInputAsArray;
+function getInputAsInt(name, options) {
+    const value = parseInt(core.getInput(name, options));
+    if (isNaN(value) || value < 0) {
+        return undefined;
+    }
+    return value;
+}
+exports.getInputAsInt = getInputAsInt;


 /***/ }),
@@ -38485,6 +38493,7 @@ var Inputs;
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
+    Inputs["UploadChunkSize"] = "upload-chunk-size";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {

dist/save/index.js (vendored, 15 changed lines)

@@ -31296,7 +31296,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
+exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(694);
 function isGhes() {
@@ -31353,6 +31353,14 @@ function getInputAsArray(name, options) {
         .filter(x => x !== "");
 }
 exports.getInputAsArray = getInputAsArray;
+function getInputAsInt(name, options) {
+    const value = parseInt(core.getInput(name, options));
+    if (isNaN(value) || value < 0) {
+        return undefined;
+    }
+    return value;
+}
+exports.getInputAsInt = getInputAsInt;


 /***/ }),
@@ -38353,7 +38361,9 @@ function run() {
                 required: true
             });
             try {
-                yield cache.saveCache(cachePaths, primaryKey);
+                yield cache.saveCache(cachePaths, primaryKey, {
+                    uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
+                });
             }
             catch (error) {
                 if (error.name === cache.ValidationError.name) {
@@ -38574,6 +38584,7 @@ var Inputs;
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
+    Inputs["UploadChunkSize"] = "upload-chunk-size";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {

package-lock.json (generated, 2 changed lines)

@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "2.1.1",
+  "version": "2.1.2",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {

package.json

@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "2.1.1",
+  "version": "2.1.2",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",

src/constants.ts

@@ -1,7 +1,8 @@
 export enum Inputs {
     Key = "key",
     Path = "path",
-    RestoreKeys = "restore-keys"
+    RestoreKeys = "restore-keys",
+    UploadChunkSize = "upload-chunk-size"
 }

 export enum Outputs {

src/save.ts

@@ -41,7 +41,9 @@ async function run(): Promise<void> {
         });

         try {
-            await cache.saveCache(cachePaths, primaryKey);
+            await cache.saveCache(cachePaths, primaryKey, {
+                uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
+            });
         } catch (error) {
             if (error.name === cache.ValidationError.name) {
                 throw error;

src/utils/actionUtils.ts

@@ -63,3 +63,14 @@ export function getInputAsArray(
         .map(s => s.trim())
         .filter(x => x !== "");
 }
+
+export function getInputAsInt(
+    name: string,
+    options?: core.InputOptions
+): number | undefined {
+    const value = parseInt(core.getInput(name, options));
+    if (isNaN(value) || value < 0) {
+        return undefined;
+    }
+    return value;
+}

src/utils/testUtils.ts

@@ -26,4 +26,5 @@ export function clearInputs(): void {
     delete process.env[getInputName(Inputs.Path)];
     delete process.env[getInputName(Inputs.Key)];
     delete process.env[getInputName(Inputs.RestoreKeys)];
+    delete process.env[getInputName(Inputs.UploadChunkSize)];
 }
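
As a side note, @actions/core resolves inputs from INPUT_* environment variables (spaces become underscores and the name is upper-cased), which is exactly what getInputName, setInput, and clearInputs above manipulate. A small standalone sketch of that mechanism with an illustrative value, not part of the commit:

import * as core from "@actions/core";

// "upload-chunk-size" is looked up as the INPUT_UPLOAD-CHUNK-SIZE variable.
process.env["INPUT_UPLOAD-CHUNK-SIZE"] = "4000000";
console.log(parseInt(core.getInput("upload-chunk-size"))); // 4000000

// When the variable is unset, core.getInput returns "", parseInt yields NaN,
// and getInputAsInt therefore returns undefined, so cache.saveCache falls
// back to its default chunk size.
delete process.env["INPUT_UPLOAD-CHUNK-SIZE"];
console.log(parseInt(core.getInput("upload-chunk-size"))); // NaN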