Mirror of https://github.com/actions/cache.git (synced 2025-06-26 04:11:10 +02:00)

Compare commits: 26 commits
Comparing v3.1.0-bet...tanuj077/c
Commits (SHA1; author and date columns were not captured):
0685539942, a92fb881ae, 5f3ddebb2f, 8a88690a20, 6e2c6a5916, 2c9fb32186,
01d96636a0, 9c5a42a7c9, a172494938, f8717682fb, af1210e2a3, ab0e7714ce,
fb4a5dce60, 71334c58b2, 888d454557, dddd7ce07c, abddc4dd44, 921c58ee44,
7f45813c72, 0769f2e443, 5fe0b944ef, 69b8227b27, 515d10b4fd, 669e7536d9,
29dbbce762, ea5981db97
.devcontainer/devcontainer.json | 14 (new file)
@@ -0,0 +1,14 @@
+{
+    "name": "Node.js & TypeScript",
+    "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye",
+    // Features to add to the dev container. More info: https://containers.dev/implementors/features.
+    // "features": {},
+    // Use 'forwardPorts' to make a list of ports inside the container available locally.
+    // "forwardPorts": [],
+    // Use 'postCreateCommand' to run commands after the container is created.
+    "postCreateCommand": "npm install && npm run build"
+    // Configure tool-specific properties.
+    // "customizations": {},
+    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
+    // "remoteUser": "root"
+}
.github/workflows/check-dist.yml | 1 (vendored)
@@ -27,7 +27,6 @@ jobs:
       uses: actions/setup-node@v3
       with:
         node-version: 16.x
-        cache: npm
     - name: Install dependencies
       run: npm ci
     - name: Rebuild the dist/ directory
.github/workflows/workflow.yml | 12 (vendored)
@@ -25,7 +25,17 @@ jobs:
       uses: actions/setup-node@v3
       with:
         node-version: 16.x
-        cache: npm
+    - name: Determine npm cache directory
+      id: npm-cache
+      run: |
+        echo "::set-output name=dir::$(npm config get cache)"
+    - name: Restore npm cache
+      uses: actions/cache@v3
+      with:
+        path: ${{ steps.npm-cache.outputs.dir }}
+        key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
+        restore-keys: |
+          ${{ runner.os }}-node-
     - run: npm ci
     - name: Prettier Format Check
       run: npm run format-check
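Note: the `::set-output` workflow command added above is the shell-side counterpart of `setOutput` from `@actions/core`. A minimal TypeScript sketch of the same step done programmatically (the output name `dir` mirrors the YAML above; this snippet is an illustration, not code from this compare):

import * as core from "@actions/core";
import { getExecOutput } from "@actions/exec";

// Sketch: resolve the npm cache directory and publish it as a step output,
// equivalent to `echo "::set-output name=dir::$(npm config get cache)"`.
async function determineNpmCacheDir(): Promise<void> {
    const { stdout } = await getExecOutput("npm", ["config", "get", "cache"], {
        silent: true
    });
    // Consumed downstream as ${{ steps.npm-cache.outputs.dir }} when the step id is `npm-cache`.
    core.setOutput("dir", stdout.trim());
}

determineNpmCacheDir().catch(err => core.setFailed(String(err)));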
@@ -40,6 +40,3 @@
### 3.0.11
|
### 3.0.11
|
||||||
- Update toolkit version to 3.0.5 to include `@actions/core@^1.10.0`
|
- Update toolkit version to 3.0.5 to include `@actions/core@^1.10.0`
|
||||||
- Update `@actions/cache` to use updated `saveState` and `setOutput` functions from `@actions/core@^1.10.0`
|
- Update `@actions/cache` to use updated `saveState` and `setOutput` functions from `@actions/core@^1.10.0`
|
||||||
|
|
||||||
### 3.1.0-beta.1
|
|
||||||
- Update `@actions/cache` on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984))
|
|
||||||
|
@@ -324,3 +324,113 @@ test("restore with cache found for restore key", async () => {
);
|
);
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test("restore with enabling save on any failure feature", async () => {
|
||||||
|
const path = "node_modules";
|
||||||
|
const key = "node-test";
|
||||||
|
const restoreKey = "node-";
|
||||||
|
testUtils.setInputs({
|
||||||
|
path: path,
|
||||||
|
key,
|
||||||
|
restoreKeys: [restoreKey],
|
||||||
|
saveOnAnyFailure: true
|
||||||
|
});
|
||||||
|
|
||||||
|
const debugMock = jest.spyOn(core, "debug");
|
||||||
|
const infoMock = jest.spyOn(core, "info");
|
||||||
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
|
const stateMock = jest.spyOn(core, "saveState");
|
||||||
|
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||||
|
const restoreCacheMock = jest
|
||||||
|
.spyOn(cache, "restoreCache")
|
||||||
|
.mockImplementationOnce(() => {
|
||||||
|
return Promise.resolve(restoreKey);
|
||||||
|
});
|
||||||
|
|
||||||
|
await run();
|
||||||
|
|
||||||
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
||||||
|
|
||||||
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
||||||
|
|
||||||
|
expect(debugMock).toHaveBeenCalledWith(
|
||||||
|
`Exporting environment variable SAVE_CACHE_ON_ANY_FAILURE`
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(infoMock).toHaveBeenCalledWith(
|
||||||
|
`Input Variable SAVE_CACHE_ON_ANY_FAILURE is set to true, the cache will be saved despite of any failure in the build.`
|
||||||
|
);
|
||||||
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("Fail restore when fail on cache miss is enabled and primary key not found", async () => {
|
||||||
|
const path = "node_modules";
|
||||||
|
const key = "node-test";
|
||||||
|
const restoreKey = "node-";
|
||||||
|
testUtils.setInputs({
|
||||||
|
path: path,
|
||||||
|
key,
|
||||||
|
restoreKeys: [restoreKey],
|
||||||
|
failOnCacheMiss: true
|
||||||
|
});
|
||||||
|
|
||||||
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
|
const stateMock = jest.spyOn(core, "saveState");
|
||||||
|
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||||
|
const restoreCacheMock = jest
|
||||||
|
.spyOn(cache, "restoreCache")
|
||||||
|
.mockImplementationOnce(() => {
|
||||||
|
return Promise.resolve(undefined);
|
||||||
|
});
|
||||||
|
|
||||||
|
await run();
|
||||||
|
|
||||||
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
||||||
|
|
||||||
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(0);
|
||||||
|
|
||||||
|
expect(failedMock).toHaveBeenCalledWith(
|
||||||
|
`Cache with the given input key ${key} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
|
||||||
|
);
|
||||||
|
expect(failedMock).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("Fail restore when fail on cache miss is enabled and primary key doesn't match restored key", async () => {
|
||||||
|
const path = "node_modules";
|
||||||
|
const key = "node-test";
|
||||||
|
const restoreKey = "node-";
|
||||||
|
testUtils.setInputs({
|
||||||
|
path: path,
|
||||||
|
key,
|
||||||
|
restoreKeys: [restoreKey],
|
||||||
|
failOnCacheMiss: true
|
||||||
|
});
|
||||||
|
|
||||||
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
|
const stateMock = jest.spyOn(core, "saveState");
|
||||||
|
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||||
|
const restoreCacheMock = jest
|
||||||
|
.spyOn(cache, "restoreCache")
|
||||||
|
.mockImplementationOnce(() => {
|
||||||
|
return Promise.resolve(restoreKey);
|
||||||
|
});
|
||||||
|
|
||||||
|
await run();
|
||||||
|
|
||||||
|
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
||||||
|
|
||||||
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
||||||
|
|
||||||
|
expect(failedMock).toHaveBeenCalledWith(
|
||||||
|
`Restored cache key doesn't match the given input key ${key}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
|
||||||
|
);
|
||||||
|
expect(failedMock).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
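Note: the three tests above pin down the restore-side behavior of the new inputs, but the `src/restore.ts` changes themselves are not in this excerpt. A hedged sketch of the check the assertions imply (the failure messages are copied verbatim from the expectations; the surrounding control flow and the function name are inferred, not taken from the PR):

import * as cache from "@actions/cache";
import * as core from "@actions/core";

// Inferred fail-on-cache-miss handling inside the restore run().
async function restoreWithFailOnMiss(
    cachePaths: string[],
    primaryKey: string,
    restoreKeys: string[],
    failOnCacheMiss: boolean
): Promise<string | undefined> {
    const cacheKey = await cache.restoreCache(cachePaths, primaryKey, restoreKeys);
    if (!cacheKey) {
        if (failOnCacheMiss) {
            // Message taken from the test expectation above.
            core.setFailed(
                `Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
            );
        }
        return undefined;
    }
    if (failOnCacheMiss && cacheKey !== primaryKey) {
        // A restore-key hit still counts as a miss for the primary key.
        core.setFailed(
            `Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
        );
    }
    return cacheKey;
}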
__tests__/save-only.test.ts | 165 (new file)
@@ -0,0 +1,165 @@
+import * as cache from "@actions/cache";
+import * as core from "@actions/core";
+
+import { Events, Inputs, RefKey } from "../src/constants";
+import run from "../src/save-only";
+import * as actionUtils from "../src/utils/actionUtils";
+import * as testUtils from "../src/utils/testUtils";
+
+jest.mock("@actions/core");
+jest.mock("@actions/cache");
+jest.mock("../src/utils/actionUtils");
+
+beforeAll(() => {
+    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
+        return jest.requireActual("@actions/core").getInput(name, options);
+    });
+
+    jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => {
+        return jest.requireActual("../src/utils/actionUtils").getCacheState();
+    });
+
+    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
+        (name, options) => {
+            return jest
+                .requireActual("../src/utils/actionUtils")
+                .getInputAsArray(name, options);
+        }
+    );
+
+    jest.spyOn(actionUtils, "getInputAsInt").mockImplementation(
+        (name, options) => {
+            return jest
+                .requireActual("../src/utils/actionUtils")
+                .getInputAsInt(name, options);
+        }
+    );
+
+    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
+        (key, cacheResult) => {
+            return jest
+                .requireActual("../src/utils/actionUtils")
+                .isExactKeyMatch(key, cacheResult);
+        }
+    );
+
+    jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.isValidEvent();
+    });
+});
+
+beforeEach(() => {
+    process.env[Events.Key] = Events.Push;
+    process.env[RefKey] = "refs/heads/feature-branch";
+
+    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
+    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
+        () => true
+    );
+});
+
+afterEach(() => {
+    testUtils.clearInputs();
+    delete process.env[Events.Key];
+    delete process.env[RefKey];
+});
+
+test("save cache when save-only is required", async () => {
+    const failedMock = jest.spyOn(core, "setFailed");
+
+    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
+    const savedCacheKey = "Linux-node-";
+
+    jest.spyOn(core, "getInput")
+        // Cache Entry State
+        .mockImplementationOnce(() => {
+            return savedCacheKey;
+        })
+        // Cache Key
+        .mockImplementationOnce(() => {
+            return primaryKey;
+        });
+
+    const inputPath = "node_modules";
+    testUtils.setInput(Inputs.Path, inputPath);
+    testUtils.setInput(Inputs.UploadChunkSize, "4000000");
+
+    const cacheId = 4;
+    const saveCacheMock = jest
+        .spyOn(cache, "saveCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(cacheId);
+        });
+
+    await run();
+
+    expect(saveCacheMock).toHaveBeenCalledTimes(1);
+    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
+        uploadChunkSize: 4000000
+    });
+
+    expect(failedMock).toHaveBeenCalledTimes(0);
+});
+
+test("save when save on any failure is true", async () => {
+    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
+    const failedMock = jest.spyOn(core, "setFailed");
+
+    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
+    const primaryKey = "Linux-node-";
+    const inputPath = "node_modules";
+
+    jest.spyOn(core, "getInput")
+        // Cache Entry State
+        .mockImplementationOnce(() => {
+            return savedCacheKey;
+        })
+        // Cache Key
+        .mockImplementationOnce(() => {
+            return primaryKey;
+        });
+
+    testUtils.setInput(Inputs.Path, inputPath);
+    testUtils.setInput(Inputs.UploadChunkSize, "4000000");
+    testUtils.setInput(Inputs.SaveOnAnyFailure, "true");
+
+    const cacheId = 4;
+    const saveCacheMock = jest
+        .spyOn(cache, "saveCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(cacheId);
+        });
+
+    await run();
+
+    expect(saveCacheMock).toHaveBeenCalledTimes(1);
+    expect(logWarningMock).toHaveBeenCalledTimes(0);
+    expect(failedMock).toHaveBeenCalledTimes(0);
+});
+
+test("save with no primary key in input outputs warning", async () => {
+    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
+    const failedMock = jest.spyOn(core, "setFailed");
+
+    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
+    jest.spyOn(core, "getState")
+        // Cache Entry State
+        .mockImplementationOnce(() => {
+            return savedCacheKey;
+        })
+        // Cache Key
+        .mockImplementationOnce(() => {
+            return "";
+        });
+    const saveCacheMock = jest.spyOn(cache, "saveCache");
+
+    await run();
+
+    expect(saveCacheMock).toHaveBeenCalledTimes(0);
+    expect(logWarningMock).toHaveBeenCalledWith(
+        `Error retrieving key from inputs.`
+    );
+    expect(logWarningMock).toHaveBeenCalledTimes(1);
+    expect(failedMock).toHaveBeenCalledTimes(0);
+});
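Note: these tests import `run` from `../src/save-only`, which is not shown in this compare. A minimal sketch of what such an entry point would need to do to satisfy them (the helpers `getInputAsArray`, `getInputAsInt`, and `logWarning` come from the repo's `actionUtils` as mocked above; the control flow here is inferred from the assertions, not taken from the PR):

// Hypothetical sketch of src/save-only.ts inferred from the tests above.
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { Inputs } from "./constants";
import * as utils from "./utils/actionUtils";

async function run(): Promise<void> {
    try {
        // The tests read the primary key from inputs and warn if it is empty.
        const primaryKey = core.getInput(Inputs.Key);
        if (!primaryKey) {
            utils.logWarning(`Error retrieving key from inputs.`);
            return;
        }
        const cachePaths = utils.getInputAsArray(Inputs.Path, { required: true });
        // saveCache(paths, key, options) is the real @actions/cache signature.
        await cache.saveCache(cachePaths, primaryKey, {
            uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
        });
    } catch (error: unknown) {
        utils.logWarning((error as Error).message);
    }
}

export default run;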
action.yml | 10
@@ -14,6 +14,14 @@ inputs:
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'
     required: false
+  exit-on-cache-miss:
+    description: 'Fail the workflow if the cache is not found for the primary key'
+    required: false
+    default: false
+  save-on-any-failure:
+    description: 'Save cache (on cache miss) despite of any failure during the workflow run'
+    required: false
+    default: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
@@ -21,7 +29,7 @@ runs:
   using: 'node16'
   main: 'dist/restore/index.js'
   post: 'dist/save/index.js'
-  post-if: 'success()'
+  post-if: (success() || (env.SAVE_CACHE_ON_ANY_FAILURE == 'yes'))
 branding:
   icon: 'archive'
   color: 'gray-dark'
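Note: for that `post-if` expression to ever be truthy, the main (restore) step has to export `SAVE_CACHE_ON_ANY_FAILURE` into the job environment. A sketch of that wiring (the input name, the `Variables.SaveCacheOnAnyFailure` constant, and both log messages come from this compare's tests and constants; exporting the value `'yes'` is an assumption based on the expression above, and the repo-relative import mirrors the project layout):

// Sketch: export the flag during restore so the post-if expression can see it.
import * as core from "@actions/core";
import { Inputs, Variables } from "./constants";

export function exportSaveOnFailureFlag(): void {
    if (core.getInput(Inputs.SaveOnAnyFailure) === "true") {
        core.debug(`Exporting environment variable ${Variables.SaveCacheOnAnyFailure}`);
        // Assumed value: post-if checks env.SAVE_CACHE_ON_ANY_FAILURE == 'yes'.
        core.exportVariable(Variables.SaveCacheOnAnyFailure, "yes");
        core.info(
            `Input Variable ${Variables.SaveCacheOnAnyFailure} is set to true, the cache will be saved despite of any failure in the build.`
        );
    }
}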
dist/restore/index.js | 702 (vendored)
@@ -1177,6 +1177,10 @@ function getVersion(app) {
 // Use zstandard if possible to maximize cache performance
 function getCompressionMethod() {
     return __awaiter(this, void 0, void 0, function* () {
+        if (process.platform === 'win32' && !(yield isGnuTarInstalled())) {
+            // Disable zstd due to bug https://github.com/actions/cache/issues/301
+            return constants_1.CompressionMethod.Gzip;
+        }
         const versionOutput = yield getVersion('zstd');
         const version = semver.clean(versionOutput);
         if (!versionOutput.toLowerCase().includes('zstd command line interface')) {
@@ -1200,16 +1204,13 @@ function getCacheFileName(compressionMethod) {
         : constants_1.CacheFilename.Zstd;
 }
 exports.getCacheFileName = getCacheFileName;
-function getGnuTarPathOnWindows() {
+function isGnuTarInstalled() {
     return __awaiter(this, void 0, void 0, function* () {
-        if (fs.existsSync(constants_1.GnuTarPathOnWindows)) {
-            return constants_1.GnuTarPathOnWindows;
-        }
         const versionOutput = yield getVersion('tar');
-        return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : '';
+        return versionOutput.toLowerCase().includes('gnu tar');
     });
 }
-exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows;
+exports.isGnuTarInstalled = isGnuTarInstalled;
 function assertDefined(name, value) {
     if (value === undefined) {
         throw Error(`Expected ${name} but value was undefiend`);
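Note: the head-side `isGnuTarInstalled` boils down to parsing `tar --version`. A standalone TypeScript sketch of the same probe (uses the real `getExecOutput` from `@actions/exec`; the string match mirrors the dist code above):

import { getExecOutput } from "@actions/exec";

// Probe the tar on PATH and report whether it is GNU tar,
// mirroring the versionOutput.toLowerCase().includes('gnu tar') check above.
async function isGnuTarInstalled(): Promise<boolean> {
    const { stdout } = await getExecOutput("tar", ["--version"], { silent: true });
    return stdout.toLowerCase().includes("gnu tar");
}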
@@ -3045,18 +3046,19 @@ exports.default = _default;
 /***/ }),
 /* 105 */,
 /* 106 */
-/***/ (function(__unusedmodule, exports) {
+/***/ (function(__unusedmodule, exports, __webpack_require__) {

 "use strict";


 Object.defineProperty(exports, '__esModule', { value: true });

+var tslib = __webpack_require__(640);
+
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT license.
-/// <reference path="../shims-public.d.ts" />
-const listenersMap = new WeakMap();
-const abortedMap = new WeakMap();
+var listenersMap = new WeakMap();
+var abortedMap = new WeakMap();
 /**
  * An aborter instance implements AbortSignal interface, can abort HTTP requests.
  *
@@ -3070,8 +3072,8 @@ const abortedMap = new WeakMap();
  * await doAsyncWork(AbortSignal.none);
  * ```
  */
-class AbortSignal {
-    constructor() {
+var AbortSignal = /** @class */ (function () {
+    function AbortSignal() {
         /**
          * onabort event listener.
          */
@@ -3079,65 +3081,74 @@ class AbortSignal {
         listenersMap.set(this, []);
         abortedMap.set(this, false);
     }
+    Object.defineProperty(AbortSignal.prototype, "aborted", {
         /**
          * Status of whether aborted or not.
          *
          * @readonly
         */
-    get aborted() {
+        get: function () {
             if (!abortedMap.has(this)) {
                 throw new TypeError("Expected `this` to be an instance of AbortSignal.");
             }
             return abortedMap.get(this);
-    }
+        },
+        enumerable: false,
+        configurable: true
+    });
+    Object.defineProperty(AbortSignal, "none", {
         /**
          * Creates a new AbortSignal instance that will never be aborted.
         *
         * @readonly
         */
-    static get none() {
+        get: function () {
             return new AbortSignal();
-    }
+        },
+        enumerable: false,
+        configurable: true
+    });
     /**
      * Added new "abort" event listener, only support "abort" event.
      *
      * @param _type - Only support "abort" event
      * @param listener - The listener to be added
      */
-    addEventListener(
+    AbortSignal.prototype.addEventListener = function (
     // tslint:disable-next-line:variable-name
     _type, listener) {
         if (!listenersMap.has(this)) {
             throw new TypeError("Expected `this` to be an instance of AbortSignal.");
         }
-        const listeners = listenersMap.get(this);
+        var listeners = listenersMap.get(this);
         listeners.push(listener);
-    }
+    };
     /**
      * Remove "abort" event listener, only support "abort" event.
      *
      * @param _type - Only support "abort" event
      * @param listener - The listener to be removed
      */
-    removeEventListener(
+    AbortSignal.prototype.removeEventListener = function (
    // tslint:disable-next-line:variable-name
     _type, listener) {
         if (!listenersMap.has(this)) {
             throw new TypeError("Expected `this` to be an instance of AbortSignal.");
         }
-        const listeners = listenersMap.get(this);
-        const index = listeners.indexOf(listener);
+        var listeners = listenersMap.get(this);
+        var index = listeners.indexOf(listener);
         if (index > -1) {
             listeners.splice(index, 1);
         }
-    }
+    };
     /**
      * Dispatches a synthetic event to the AbortSignal.
      */
-    dispatchEvent(_event) {
+    AbortSignal.prototype.dispatchEvent = function (_event) {
         throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes.");
-    }
-}
+    };
+    return AbortSignal;
+}());
 /**
  * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered.
  * Will try to trigger abort event for all linked AbortSignal nodes.
@@ -3155,12 +3166,12 @@ function abortSignal(signal) {
     if (signal.onabort) {
         signal.onabort.call(signal);
     }
-    const listeners = listenersMap.get(signal);
+    var listeners = listenersMap.get(signal);
     if (listeners) {
         // Create a copy of listeners so mutations to the array
         // (e.g. via removeListener calls) don't affect the listeners
         // we invoke.
-        listeners.slice().forEach((listener) => {
+        listeners.slice().forEach(function (listener) {
             listener.call(signal, { type: "abort" });
         });
     }
@@ -3186,12 +3197,15 @@ function abortSignal(signal) {
  * }
  * ```
  */
-class AbortError extends Error {
-    constructor(message) {
-        super(message);
-        this.name = "AbortError";
+var AbortError = /** @class */ (function (_super) {
+    tslib.__extends(AbortError, _super);
+    function AbortError(message) {
+        var _this = _super.call(this, message) || this;
+        _this.name = "AbortError";
+        return _this;
     }
-}
+    return AbortError;
+}(Error));
 /**
  * An AbortController provides an AbortSignal and the associated controls to signal
  * that an asynchronous operation should be aborted.
@@ -3226,9 +3240,10 @@ class AbortError extends Error {
  * await doAsyncWork(aborter.withTimeout(25 * 1000));
  * ```
  */
-class AbortController {
+var AbortController = /** @class */ (function () {
     // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
-    constructor(parentSignals) {
+    function AbortController(parentSignals) {
+        var _this = this;
         this._signal = new AbortSignal();
         if (!parentSignals) {
             return;
@@ -3238,7 +3253,8 @@ class AbortController {
             // eslint-disable-next-line prefer-rest-params
             parentSignals = arguments;
         }
-        for (const parentSignal of parentSignals) {
+        for (var _i = 0, parentSignals_1 = parentSignals; _i < parentSignals_1.length; _i++) {
+            var parentSignal = parentSignals_1[_i];
             // if the parent signal has already had abort() called,
             // then call abort on this signal as well.
             if (parentSignal.aborted) {
|
|||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
// when the parent signal aborts, this signal should as well.
|
// when the parent signal aborts, this signal should as well.
|
||||||
parentSignal.addEventListener("abort", () => {
|
parentSignal.addEventListener("abort", function () {
|
||||||
this.abort();
|
_this.abort();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Object.defineProperty(AbortController.prototype, "signal", {
|
||||||
/**
|
/**
|
||||||
* The AbortSignal associated with this controller that will signal aborted
|
* The AbortSignal associated with this controller that will signal aborted
|
||||||
* when the abort method is called on this controller.
|
* when the abort method is called on this controller.
|
||||||
*
|
*
|
||||||
* @readonly
|
* @readonly
|
||||||
*/
|
*/
|
||||||
get signal() {
|
get: function () {
|
||||||
return this._signal;
|
return this._signal;
|
||||||
}
|
},
|
||||||
|
enumerable: false,
|
||||||
|
configurable: true
|
||||||
|
});
|
||||||
/**
|
/**
|
||||||
* Signal that any operations passed this controller's associated abort signal
|
* Signal that any operations passed this controller's associated abort signal
|
||||||
* to cancel any remaining work and throw an `AbortError`.
|
* to cancel any remaining work and throw an `AbortError`.
|
||||||
*/
|
*/
|
||||||
abort() {
|
AbortController.prototype.abort = function () {
|
||||||
abortSignal(this._signal);
|
abortSignal(this._signal);
|
||||||
}
|
};
|
||||||
/**
|
/**
|
||||||
* Creates a new AbortSignal instance that will abort after the provided ms.
|
* Creates a new AbortSignal instance that will abort after the provided ms.
|
||||||
* @param ms - Elapsed time in milliseconds to trigger an abort.
|
* @param ms - Elapsed time in milliseconds to trigger an abort.
|
||||||
*/
|
*/
|
||||||
static timeout(ms) {
|
AbortController.timeout = function (ms) {
|
||||||
const signal = new AbortSignal();
|
var signal = new AbortSignal();
|
||||||
const timer = setTimeout(abortSignal, ms, signal);
|
var timer = setTimeout(abortSignal, ms, signal);
|
||||||
// Prevent the active Timer from keeping the Node.js event loop active.
|
// Prevent the active Timer from keeping the Node.js event loop active.
|
||||||
if (typeof timer.unref === "function") {
|
if (typeof timer.unref === "function") {
|
||||||
timer.unref();
|
timer.unref();
|
||||||
}
|
}
|
||||||
return signal;
|
return signal;
|
||||||
}
|
};
|
||||||
}
|
return AbortController;
|
||||||
|
}());
|
||||||
|
|
||||||
exports.AbortController = AbortController;
|
exports.AbortController = AbortController;
|
||||||
exports.AbortError = AbortError;
|
exports.AbortError = AbortError;
|
||||||
@@ -4943,13 +4964,15 @@ exports.checkBypass = checkBypass;
 "use strict";

 Object.defineProperty(exports, "__esModule", { value: true });
-exports.RefKey = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
+exports.RefKey = exports.Variables = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
 var Inputs;
 (function (Inputs) {
     Inputs["Key"] = "key";
     Inputs["Path"] = "path";
     Inputs["RestoreKeys"] = "restore-keys";
     Inputs["UploadChunkSize"] = "upload-chunk-size";
+    Inputs["FailOnCacheMiss"] = "fail-on-cache-miss";
+    Inputs["SaveOnAnyFailure"] = "save-on-any-failure";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var Outputs;
 (function (Outputs) {
@@ -4966,6 +4989,10 @@ var Events;
     Events["Push"] = "push";
     Events["PullRequest"] = "pull_request";
 })(Events = exports.Events || (exports.Events = {}));
+var Variables;
+(function (Variables) {
+    Variables["SaveCacheOnAnyFailure"] = "SAVE_CACHE_ON_ANY_FAILURE";
+})(Variables = exports.Variables || (exports.Variables = {}));
 exports.RefKey = "GITHUB_REF";

@@ -38034,19 +38061,21 @@ const path = __importStar(__webpack_require__(622));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const IS_WINDOWS = process.platform === 'win32';
-// Returns tar path and type: BSD or GNU
-function getTarPath() {
+// Function also mutates the args array. For non-mutation call with passing an empty array.
+function getTarPath(args, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
             case 'win32': {
-                const gnuTar = yield utils.getGnuTarPathOnWindows();
-                const systemTar = constants_1.SystemTarPathOnWindows;
-                if (gnuTar) {
-                    // Use GNUtar as default on windows
-                    return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
-                }
+                const systemTar = `${process.env['windir']}\\System32\\tar.exe`;
+                if (compressionMethod !== constants_1.CompressionMethod.Gzip) {
+                    // We only use zstandard compression on windows when gnu tar is installed due to
+                    // a bug with compressing large files with bsdtar + zstd
+                    args.push('--force-local');
+                }
                 else if (fs_1.existsSync(systemTar)) {
-                    return { path: systemTar, type: constants_1.ArchiveToolType.BSD };
+                    return systemTar;
+                }
+                else if (yield utils.isGnuTarInstalled()) {
+                    args.push('--force-local');
                 }
                 break;
             }
@@ -38054,83 +38083,24 @@ function getTarPath() {
             const gnuTar = yield io.which('gtar', false);
             if (gnuTar) {
                 // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
-                return { path: gnuTar, type: constants_1.ArchiveToolType.GNU };
-            }
-            else {
-                return {
-                    path: yield io.which('tar', true),
-                    type: constants_1.ArchiveToolType.BSD
-                };
-            }
+                args.push('--delay-directory-restore');
+                return gnuTar;
+            }
+            break;
         }
         default:
             break;
     }
-    return {
-        path: yield io.which('tar', true),
-        type: constants_1.ArchiveToolType.GNU
-    };
+    return yield io.which('tar', true);
     });
 }
-// Return arguments for tar as per tarPath, compressionMethod, method type and os
-function getTarArgs(tarPath, compressionMethod, type, archivePath = '') {
+function execTar(args, compressionMethod, cwd) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = [`"${tarPath.path}"`];
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        const tarFile = 'cache.tar';
-        const workingDirectory = getWorkingDirectory();
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        // Method specific args
-        switch (type) {
-            case 'create':
-                args.push('--posix', '-cf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD
-                    ? tarFile
-                    : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename);
-                break;
-            case 'extract':
-                args.push('-xf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'));
-                break;
-            case 'list':
-                args.push('-tf', BSD_TAR_ZSTD
-                    ? tarFile
-                    : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P');
-                break;
-        }
-        // Platform specific args
-        if (tarPath.type === constants_1.ArchiveToolType.GNU) {
-            switch (process.platform) {
-                case 'win32':
-                    args.push('--force-local');
-                    break;
-                case 'darwin':
-                    args.push('--delay-directory-restore');
-                    break;
-            }
-        }
-        return args;
-    });
-}
-function getArgs(compressionMethod, type, archivePath = '') {
-    return __awaiter(this, void 0, void 0, function* () {
-        const tarPath = yield getTarPath();
-        const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath);
-        const compressionArgs = type !== 'create'
-            ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath)
-            : yield getCompressionProgram(tarPath, compressionMethod);
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        if (BSD_TAR_ZSTD && type !== 'create') {
-            return [...compressionArgs, ...tarArgs].join(' ');
-        }
-        else {
-            return [...tarArgs, ...compressionArgs].join(' ');
-        }
+        try {
+            yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd });
+        }
+        catch (error) {
+            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
+        }
     });
 }
@@ -38139,89 +38109,32 @@ function getWorkingDirectory() {
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
 // Common function for extractTar and listTar to get the compression method
-function getDecompressionProgram(tarPath, compressionMethod, archivePath) {
-    return __awaiter(this, void 0, void 0, function* () {
-        // -d: Decompress.
-        // unzstd is equivalent to 'zstd -d'
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        switch (compressionMethod) {
-            case constants_1.CompressionMethod.Zstd:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -d --long=30 -o',
-                        constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        '&&'
-                    ]
-                    : [
-                        '--use-compress-program',
-                        IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30'
-                    ];
-            case constants_1.CompressionMethod.ZstdWithoutLong:
-                return BSD_TAR_ZSTD
-                    ? [
-                        'zstd -d -o',
-                        constants_1.TarFilename,
-                        archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        '&&'
-                    ]
-                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd'];
-            default:
-                return ['-z'];
-        }
-    });
-}
-// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
-// zstdmt is equivalent to 'zstd -T0'
-// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-// Using 30 here because we also support 32-bit self-hosted runners.
-// Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
-function getCompressionProgram(tarPath, compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
-        const cacheFileName = utils.getCacheFileName(compressionMethod);
-        const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD &&
-            compressionMethod !== constants_1.CompressionMethod.Gzip &&
-            IS_WINDOWS;
-        switch (compressionMethod) {
-            case constants_1.CompressionMethod.Zstd:
-                return BSD_TAR_ZSTD
-                    ? [
-                        '&&',
-                        'zstd -T0 --long=30 -o',
-                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        constants_1.TarFilename
-                    ]
-                    : [
-                        '--use-compress-program',
-                        IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30'
-                    ];
-            case constants_1.CompressionMethod.ZstdWithoutLong:
-                return BSD_TAR_ZSTD
-                    ? [
-                        '&&',
-                        'zstd -T0 -o',
-                        cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-                        constants_1.TarFilename
-                    ]
-                    : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt'];
-            default:
-                return ['-z'];
-        }
-    });
-}
+function getCompressionProgram(compressionMethod) {
+    // -d: Decompress.
+    // unzstd is equivalent to 'zstd -d'
+    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+    // Using 30 here because we also support 32-bit self-hosted runners.
+    switch (compressionMethod) {
+        case constants_1.CompressionMethod.Zstd:
+            return [
+                '--use-compress-program',
+                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
+            ];
+        case constants_1.CompressionMethod.ZstdWithoutLong:
+            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
+        default:
+            return ['-z'];
+    }
+}
 function listTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
-        const args = yield getArgs(compressionMethod, 'list', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const args = [
+            ...getCompressionProgram(compressionMethod),
+            '-tf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P'
+        ];
+        yield execTar(args, compressionMethod);
     });
 }
 exports.listTar = listTar;
@@ -38230,27 +38143,57 @@ function extractTar(archivePath, compressionMethod) {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        const args = yield getArgs(compressionMethod, 'extract', archivePath);
-        try {
-            yield exec_1.exec(args);
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const args = [
+            ...getCompressionProgram(compressionMethod),
+            '-xf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P',
+            '-C',
+            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')
+        ];
+        yield execTar(args, compressionMethod);
     });
 }
 exports.extractTar = extractTar;
 function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Write source directories to manifest.txt to avoid command length limits
-        fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n'));
-        const args = yield getArgs(compressionMethod, 'create');
-        try {
-            yield exec_1.exec(args, undefined, { cwd: archiveFolder });
-        }
-        catch (error) {
-            throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`);
-        }
+        const manifestFilename = 'manifest.txt';
+        const cacheFileName = utils.getCacheFileName(compressionMethod);
+        fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
+        const workingDirectory = getWorkingDirectory();
+        // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+        // zstdmt is equivalent to 'zstd -T0'
+        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+        // Using 30 here because we also support 32-bit self-hosted runners.
+        // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
+        function getCompressionProgram() {
+            switch (compressionMethod) {
+                case constants_1.CompressionMethod.Zstd:
+                    return [
+                        '--use-compress-program',
+                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+                    ];
+                case constants_1.CompressionMethod.ZstdWithoutLong:
+                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
+                default:
+                    return ['-z'];
+            }
+        }
+        const args = [
+            '--posix',
+            ...getCompressionProgram(),
+            '-cf',
+            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '--exclude',
+            cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P',
+            '-C',
+            workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '--files-from',
+            manifestFilename
+        ];
+        yield execTar(args, compressionMethod, archiveFolder);
     });
 }
 exports.createTar = createTar;
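Note: both versions above funnel into a single `tar` invocation whose compressor is swapped via `--use-compress-program` rather than a fixed `-z` flag. A standalone TypeScript sketch of that pattern (paths are illustrative; `exec` is the real `@actions/exec` API, and the flags mirror the hunk above):

import { exec } from "@actions/exec";

// Illustrative only: create a zstd-compressed tar the same way the dist code does,
// swapping the compressor via --use-compress-program instead of a fixed -z/-j flag.
async function createZstdTar(archive: string, manifest: string, cwd: string): Promise<void> {
    await exec("tar", [
        "--posix",
        "--use-compress-program",
        process.platform === "win32" ? "zstd -T0 --long=30" : "zstdmt --long=30",
        "-cf", archive,
        "-P",
        "-C", cwd,
        "--files-from", manifest
    ], { cwd });
}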
@@ -38552,16 +38495,17 @@ function getInputAsInt(name, options) {
 }
 exports.getInputAsInt = getInputAsInt;
 function isCacheFeatureAvailable() {
-    if (cache.isFeatureAvailable()) {
-        return true;
-    }
-    if (isGhes()) {
-        logWarning(`Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.
+    if (!cache.isFeatureAvailable()) {
+        if (isGhes()) {
+            logWarning(`Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.
 Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github Connect, please unretire the actions/cache namespace before upgrade (see https://docs.github.com/en/enterprise-server@3.5/admin/github-actions/managing-access-to-actions-from-githubcom/enabling-automatic-access-to-githubcom-actions-using-github-connect#automatic-retirement-of-namespaces-for-actions-accessed-on-githubcom)`);
+        }
+        else {
+            logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions.");
+        }
         return false;
     }
-    logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions.");
-    return false;
+    return true;
 }
 exports.isCacheFeatureAvailable = isCacheFeatureAvailable;
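Note: the head side restructures the guard so every `!isFeatureAvailable()` path warns and returns false. The same control flow in TypeScript terms (a sketch with shortened warning text, not the repo's exact source; `isGhes` is passed in to keep the snippet self-contained):

import * as cache from "@actions/cache";
import * as core from "@actions/core";

// Same shape as the hunk above: warn on the appropriate channel, then bail out.
function isCacheFeatureAvailable(isGhes: () => boolean): boolean {
    if (!cache.isFeatureAvailable()) {
        if (isGhes()) {
            core.warning("Cache action is only supported on GHES version >= 3.5.");
        } else {
            core.warning("An internal error has occurred in cache backend.");
        }
        return false;
    }
    return true;
}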
|
|
||||||
@ -44189,7 +44133,318 @@ exports.default = _default;
|
|||||||
|
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
/* 640 */,
|
/* 640 */
|
||||||
|
/***/ (function(module) {
|
||||||
|
|
||||||
|
/*! *****************************************************************************
|
||||||
|
Copyright (c) Microsoft Corporation.
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||||
|
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||||
|
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||||
|
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||||
|
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||||
|
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||||
|
PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
***************************************************************************** */
|
||||||
|
/* global global, define, System, Reflect, Promise */
|
||||||
|
var __extends;
|
||||||
|
var __assign;
|
||||||
|
var __rest;
|
||||||
|
var __decorate;
|
||||||
|
var __param;
|
||||||
|
var __metadata;
|
||||||
|
var __awaiter;
|
||||||
|
var __generator;
|
||||||
|
var __exportStar;
|
||||||
|
var __values;
|
||||||
|
var __read;
|
||||||
|
var __spread;
|
||||||
|
var __spreadArrays;
|
||||||
|
var __spreadArray;
|
||||||
|
var __await;
|
||||||
|
var __asyncGenerator;
|
||||||
|
var __asyncDelegator;
|
||||||
|
var __asyncValues;
|
||||||
|
var __makeTemplateObject;
|
||||||
|
var __importStar;
|
||||||
|
var __importDefault;
|
||||||
|
var __classPrivateFieldGet;
|
||||||
|
var __classPrivateFieldSet;
|
||||||
|
var __createBinding;
|
||||||
|
(function (factory) {
|
||||||
|
var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {};
|
||||||
|
if (typeof define === "function" && define.amd) {
|
||||||
|
define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); });
|
||||||
|
}
|
||||||
|
else if ( true && typeof module.exports === "object") {
|
||||||
|
factory(createExporter(root, createExporter(module.exports)));
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
factory(createExporter(root));
|
||||||
|
}
|
||||||
|
function createExporter(exports, previous) {
|
||||||
|
if (exports !== root) {
|
||||||
|
if (typeof Object.create === "function") {
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
exports.__esModule = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return function (id, v) { return exports[id] = previous ? previous(id, v) : v; };
|
||||||
|
}
|
||||||
|
})
|
||||||
|
(function (exporter) {
|
||||||
|
var extendStatics = Object.setPrototypeOf ||
|
||||||
|
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||||
|
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
|
||||||
|
|
||||||
|
__extends = function (d, b) {
|
||||||
|
if (typeof b !== "function" && b !== null)
|
||||||
|
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
|
||||||
|
extendStatics(d, b);
|
||||||
|
function __() { this.constructor = d; }
|
||||||
|
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||||
|
};
|
||||||
|
|
||||||
|
__assign = Object.assign || function (t) {
|
||||||
|
for (var s, i = 1, n = arguments.length; i < n; i++) {
|
||||||
|
s = arguments[i];
|
||||||
|
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p];
|
||||||
|
}
|
||||||
|
return t;
|
||||||
|
};
|
||||||
|
|
||||||
|
__rest = function (s, e) {
|
||||||
|
var t = {};
|
||||||
|
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||||
|
t[p] = s[p];
|
||||||
|
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||||
|
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||||
|
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||||
|
t[p[i]] = s[p[i]];
|
||||||
|
}
|
||||||
|
return t;
|
||||||
|
};
|
||||||
|
|
||||||
|
__decorate = function (decorators, target, key, desc) {
|
||||||
|
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
||||||
|
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
||||||
|
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
||||||
|
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
||||||
|
};
|
||||||
|
|
||||||
|
__param = function (paramIndex, decorator) {
|
||||||
|
return function (target, key) { decorator(target, key, paramIndex); }
|
||||||
|
};
|
||||||
|
|
||||||
|
__metadata = function (metadataKey, metadataValue) {
|
||||||
|
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue);
|
||||||
|
};
|
||||||
|
|
||||||
|
__awaiter = function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
__generator = function (thisArg, body) {
|
||||||
|
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||||
|
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||||
|
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||||
|
function step(op) {
|
||||||
|
if (f) throw new TypeError("Generator is already executing.");
|
||||||
|
while (_) try {
|
||||||
|
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||||
|
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||||
|
switch (op[0]) {
|
||||||
|
case 0: case 1: t = op; break;
|
||||||
|
case 4: _.label++; return { value: op[1], done: false };
|
||||||
|
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||||
|
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||||
|
default:
|
||||||
|
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||||
|
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||||
|
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||||
|
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||||
|
if (t[2]) _.ops.pop();
|
||||||
|
_.trys.pop(); continue;
|
||||||
|
}
|
||||||
|
op = body.call(thisArg, _);
|
||||||
|
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||||
|
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
__exportStar = function(m, o) {
|
||||||
|
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p);
|
||||||
|
};
|
||||||
|
|
||||||
|
__createBinding = Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
});
|
||||||
|
|
||||||
|
__values = function (o) {
|
||||||
|
var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
|
||||||
|
if (m) return m.call(o);
|
||||||
|
if (o && typeof o.length === "number") return {
|
||||||
|
next: function () {
|
||||||
|
if (o && i >= o.length) o = void 0;
|
||||||
|
return { value: o && o[i++], done: !o };
|
||||||
|
}
|
||||||
|
};
|
||||||
|
throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
|
||||||
|
};
|
||||||
|
|
||||||
|
__read = function (o, n) {
|
||||||
|
var m = typeof Symbol === "function" && o[Symbol.iterator];
|
||||||
|
if (!m) return o;
|
||||||
|
var i = m.call(o), r, ar = [], e;
|
||||||
|
try {
|
||||||
|
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
|
||||||
|
}
|
||||||
|
catch (error) { e = { error: error }; }
|
||||||
|
finally {
|
||||||
|
try {
|
||||||
|
if (r && !r.done && (m = i["return"])) m.call(i);
|
||||||
|
}
|
||||||
|
finally { if (e) throw e.error; }
|
||||||
|
}
|
||||||
|
return ar;
|
||||||
|
};
|
||||||
|
|
||||||
|
/** @deprecated */
|
||||||
|
__spread = function () {
|
||||||
|
for (var ar = [], i = 0; i < arguments.length; i++)
|
||||||
|
ar = ar.concat(__read(arguments[i]));
|
||||||
|
return ar;
|
||||||
|
};
|
||||||
|
|
||||||
|
/** @deprecated */
|
||||||
|
__spreadArrays = function () {
|
||||||
|
for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length;
|
||||||
|
for (var r = Array(s), k = 0, i = 0; i < il; i++)
|
||||||
|
for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++)
|
||||||
|
r[k] = a[j];
|
||||||
|
return r;
|
||||||
|
};
|
||||||
|
|
||||||
|
__spreadArray = function (to, from, pack) {
|
||||||
|
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
|
||||||
|
if (ar || !(i in from)) {
|
||||||
|
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
|
||||||
|
ar[i] = from[i];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return to.concat(ar || Array.prototype.slice.call(from));
|
||||||
|
};
|
||||||
|
|
||||||
|
__await = function (v) {
|
||||||
|
return this instanceof __await ? (this.v = v, this) : new __await(v);
|
||||||
|
};
|
||||||
|
|
||||||
|
__asyncGenerator = function (thisArg, _arguments, generator) {
|
||||||
|
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||||
|
var g = generator.apply(thisArg, _arguments || []), i, q = [];
|
||||||
|
return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i;
|
||||||
|
function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; }
|
||||||
|
function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } }
|
||||||
|
function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); }
|
||||||
|
function fulfill(value) { resume("next", value); }
|
||||||
|
function reject(value) { resume("throw", value); }
|
||||||
|
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
||||||
|
};
|
||||||
|
|
||||||
|
__asyncDelegator = function (o) {
|
||||||
|
var i, p;
|
||||||
|
return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i;
|
||||||
|
function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; }
|
||||||
|
};
|
||||||
|
|
||||||
|
__asyncValues = function (o) {
|
||||||
|
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||||
|
var m = o[Symbol.asyncIterator], i;
|
||||||
|
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||||
|
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||||
|
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||||
|
};
|
||||||
|
|
||||||
|
__makeTemplateObject = function (cooked, raw) {
|
||||||
|
if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; }
|
||||||
|
return cooked;
|
||||||
|
};
|
||||||
|
|
||||||
|
var __setModuleDefault = Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
};
|
||||||
|
|
||||||
|
__importStar = function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
|
||||||
|
__importDefault = function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
|
|
||||||
|
__classPrivateFieldGet = function (receiver, state, kind, f) {
|
||||||
|
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
|
||||||
|
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
||||||
|
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
||||||
|
};
|
||||||
|
|
||||||
|
__classPrivateFieldSet = function (receiver, state, value, kind, f) {
|
||||||
|
if (kind === "m") throw new TypeError("Private method is not writable");
|
||||||
|
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
|
||||||
|
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
||||||
|
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
|
||||||
|
};
|
||||||
|
|
||||||
|
exporter("__extends", __extends);
|
||||||
|
exporter("__assign", __assign);
|
||||||
|
exporter("__rest", __rest);
|
||||||
|
exporter("__decorate", __decorate);
|
||||||
|
exporter("__param", __param);
|
||||||
|
exporter("__metadata", __metadata);
|
||||||
|
exporter("__awaiter", __awaiter);
|
||||||
|
exporter("__generator", __generator);
|
||||||
|
exporter("__exportStar", __exportStar);
|
||||||
|
exporter("__createBinding", __createBinding);
|
||||||
|
exporter("__values", __values);
|
||||||
|
exporter("__read", __read);
|
||||||
|
exporter("__spread", __spread);
|
||||||
|
exporter("__spreadArrays", __spreadArrays);
|
||||||
|
exporter("__spreadArray", __spreadArray);
|
||||||
|
exporter("__await", __await);
|
||||||
|
exporter("__asyncGenerator", __asyncGenerator);
|
||||||
|
exporter("__asyncDelegator", __asyncDelegator);
|
||||||
|
exporter("__asyncValues", __asyncValues);
|
||||||
|
exporter("__makeTemplateObject", __makeTemplateObject);
|
||||||
|
exporter("__importStar", __importStar);
|
||||||
|
exporter("__importDefault", __importDefault);
|
||||||
|
exporter("__classPrivateFieldGet", __classPrivateFieldGet);
|
||||||
|
exporter("__classPrivateFieldSet", __classPrivateFieldSet);
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
/***/ }),
|
||||||
/* 641 */,
|
/* 641 */,
|
||||||
/* 642 */,
|
/* 642 */,
|
||||||
/* 643 */,
|
/* 643 */,
|
||||||
@@ -48759,7 +49014,17 @@ function run() {
             required: true
         });
         const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
+        //Check if user wants to save cache despite of failure in any previous job
+        const saveCache = core.getBooleanInput(constants_1.Inputs.SaveOnAnyFailure);
+        if (saveCache == true) {
+            core.debug(`Exporting environment variable ${constants_1.Variables.SaveCacheOnAnyFailure}`);
+            core.exportVariable(constants_1.Variables.SaveCacheOnAnyFailure, saveCache);
+            core.info(`Input Variable ${constants_1.Variables.SaveCacheOnAnyFailure} is set to true, the cache will be saved despite of any failure in the build.`);
+        }
         if (!cacheKey) {
+            if (core.getBooleanInput(constants_1.Inputs.FailOnCacheMiss) == true) {
+                throw new Error(`Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`);
+            }
             core.info(`Cache not found for input keys: ${[
                 primaryKey,
                 ...restoreKeys
@@ -48770,6 +49035,10 @@ function run() {
         utils.setCacheState(cacheKey);
         const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
         utils.setCacheHitOutput(isExactKeyMatch);
+        if (!isExactKeyMatch &&
+            core.getBooleanInput(constants_1.Inputs.FailOnCacheMiss) == true) {
+            throw new Error(`Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`);
+        }
         core.info(`Cache restored from key: ${cacheKey}`);
     }
     catch (error) {
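Both new guards in this hunk turn on `core.getBooleanInput`. Below is a minimal standalone sketch of that gating, assuming the stock `@actions/core` semantics; only the input and variable names are taken from the hunk above, everything else is illustrative.

import * as core from "@actions/core";

// Simulate the action input for a local run; on a runner this comes from
// the workflow's `with:` block.
process.env["INPUT_SAVE-ON-ANY-FAILURE"] = "true";

// getBooleanInput accepts true/True/TRUE and false/False/FALSE and throws
// for anything else, so by the time the guard runs the value is a real boolean.
const saveOnAnyFailure = core.getBooleanInput("save-on-any-failure");
if (saveOnAnyFailure) {
    // exportVariable updates process.env in the current process and writes
    // to GITHUB_ENV so later steps in the same job can read the flag.
    core.exportVariable("SAVE_CACHE_ON_ANY_FAILURE", saveOnAnyFailure);
}
console.log(process.env["SAVE_CACHE_ON_ANY_FAILURE"]); // "true"

Because `getBooleanInput` throws on anything other than a true/false spelling, the `== true` comparisons in the compiled code are effectively redundant but harmless.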
@@ -53237,11 +53506,6 @@ var CompressionMethod;
     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
     CompressionMethod["Zstd"] = "zstd";
 })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
-var ArchiveToolType;
-(function (ArchiveToolType) {
-    ArchiveToolType["GNU"] = "gnu";
-    ArchiveToolType["BSD"] = "bsd";
-})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
 // The default number of retry attempts.
 exports.DefaultRetryAttempts = 2;
 // The default delay in milliseconds between retry attempts.
@@ -53250,12 +53514,6 @@ exports.DefaultRetryDelay = 5000;
 // over the socket during this period, the socket is destroyed and the download
 // is aborted.
 exports.SocketTimeout = 5000;
-// The default path of GNUtar on hosted Windows runners
-exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`;
-// The default path of BSDtar on hosted Windows runners
-exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`;
-exports.TarFilename = 'cache.tar';
-exports.ManifestFilename = 'manifest.txt';
 //# sourceMappingURL=constants.js.map


/***/ }),
61367 dist/save-only/index.js vendored Normal file
File diff suppressed because one or more lines are too long

756 dist/save/index.js vendored
File diff suppressed because it is too large

6495 package-lock.json generated
File diff suppressed because it is too large

30 package.json
@@ -1,11 +1,11 @@
 {
     "name": "cache",
-    "version": "3.1.0-beta.1",
+    "version": "3.0.11",
     "private": true,
     "description": "Cache dependencies and build outputs",
     "main": "dist/restore/index.js",
     "scripts": {
-        "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
+        "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && ncc build -o dist/save-only src/save-only.ts",
         "test": "tsc --noEmit && jest --coverage",
         "lint": "eslint **/*.ts --cache",
         "format": "prettier --write **/*.ts",
@@ -23,29 +23,29 @@
     "author": "GitHub",
     "license": "MIT",
     "dependencies": {
-        "@actions/cache": "3.1.0-beta.1",
+        "@actions/cache": "^3.0.5",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"
     },
     "devDependencies": {
-        "@types/jest": "^27.5.2",
+        "@types/jest": "^27.5.0",
         "@types/nock": "^11.1.0",
-        "@types/node": "^16.18.3",
-        "@typescript-eslint/eslint-plugin": "^5.45.0",
-        "@typescript-eslint/parser": "^5.45.0",
+        "@types/node": "^16.11.33",
+        "@typescript-eslint/eslint-plugin": "^5.22.0",
+        "@typescript-eslint/parser": "^5.22.0",
         "@zeit/ncc": "^0.20.5",
-        "eslint": "^8.28.0",
+        "eslint": "^8.14.0",
         "eslint-config-prettier": "^8.5.0",
         "eslint-plugin-import": "^2.26.0",
-        "eslint-plugin-jest": "^26.9.0",
-        "eslint-plugin-prettier": "^4.2.1",
+        "eslint-plugin-jest": "^26.1.5",
+        "eslint-plugin-prettier": "^4.0.0",
         "eslint-plugin-simple-import-sort": "^7.0.0",
-        "jest": "^28.1.3",
+        "jest": "^28.0.3",
         "jest-circus": "^27.5.1",
-        "nock": "^13.2.9",
-        "prettier": "^2.8.0",
-        "ts-jest": "^28.0.8",
-        "typescript": "^4.9.3"
+        "nock": "^13.2.4",
+        "prettier": "^2.6.2",
+        "ts-jest": "^28.0.2",
+        "typescript": "^4.6.4"
     }
 }
27 restore/action.yml Normal file
@@ -0,0 +1,27 @@
+name: 'Restore Cache'
+description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
+author: 'GitHub'
+inputs:
+  path:
+    description: 'A list of files, directories, and wildcard patterns to cache and restore'
+    required: true
+  key:
+    description: 'An explicit key for restoring and saving the cache'
+    required: true
+  restore-keys:
+    description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
+    required: false
+  exit-on-cache-miss:
+    description: 'Fail the workflow if the cache is not found for the primary key'
+    required: false
+    default: false
+outputs:
+  cache-hit:
+    description: 'A boolean value to indicate an exact match was found for the primary key'
+runs:
+  using: 'node16'
+  main: '../dist/restore/index.js'
+branding:
+  icon: 'archive'
+  color: 'gray-dark'
19 save/action.yml Normal file
@@ -0,0 +1,19 @@
+name: 'Save Cache'
+description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
+author: 'GitHub'
+inputs:
+  path:
+    description: 'A list of files, directories, and wildcard patterns to cache and restore'
+    required: true
+  key:
+    description: 'An explicit key for restoring and saving the cache'
+    required: true
+  upload-chunk-size:
+    description: 'The chunk size used to split up large files during upload, in bytes'
+    required: false
+runs:
+  using: 'node16'
+  main: '../dist/save/index.js'
+branding:
+  icon: 'archive'
+  color: 'gray-dark'
@@ -2,7 +2,9 @@ export enum Inputs {
     Key = "key",
     Path = "path",
     RestoreKeys = "restore-keys",
-    UploadChunkSize = "upload-chunk-size"
+    UploadChunkSize = "upload-chunk-size",
+    FailOnCacheMiss = "fail-on-cache-miss",
+    SaveOnAnyFailure = "save-on-any-failure"
 }

 export enum Outputs {
@@ -20,4 +22,8 @@ export enum Events {
     PullRequest = "pull_request"
 }

+export enum Variables {
+    SaveCacheOnAnyFailure = "SAVE_CACHE_ON_ANY_FAILURE"
+}
+
 export const RefKey = "GITHUB_REF";
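The `Variables` enum added here names an environment variable rather than an action input, which is what lets the restore step hand a flag to the post (save) step of the same job. A sketch of that round trip, assuming standard runner behavior; in practice the two halves run as separate processes and the runner carries the variable between them.

import * as core from "@actions/core";

enum Variables {
    SaveCacheOnAnyFailure = "SAVE_CACHE_ON_ANY_FAILURE"
}

// Restore (main) step: exportVariable also updates process.env directly,
// and the runner propagates it to every later step in the job.
core.exportVariable(Variables.SaveCacheOnAnyFailure, true);

// Save (post) step: read the flag back as an ordinary environment variable.
const alwaysSave = process.env[Variables.SaveCacheOnAnyFailure] === "true";
console.log(`save on any failure: ${alwaysSave}`);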
@@ -1,7 +1,7 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";

-import { Events, Inputs, State } from "./constants";
+import { Events, Inputs, State, Variables } from "./constants";
 import * as utils from "./utils/actionUtils";

 async function run(): Promise<void> {
@@ -35,22 +35,46 @@ async function run(): Promise<void> {
             restoreKeys
         );

+        //Check if user wants to save cache despite of failure in any previous job
+        const saveCache = core.getBooleanInput(Inputs.SaveOnAnyFailure);
+        if (saveCache == true) {
+            core.debug(
+                `Exporting environment variable ${Variables.SaveCacheOnAnyFailure}`
+            );
+            core.exportVariable(Variables.SaveCacheOnAnyFailure, saveCache);
+            core.info(
+                `Input Variable ${Variables.SaveCacheOnAnyFailure} is set to true, the cache will be saved despite of any failure in the build.`
+            );
+        }
+
         if (!cacheKey) {
+            if (core.getBooleanInput(Inputs.FailOnCacheMiss) == true) {
+                throw new Error(
+                    `Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
+                );
+            }
             core.info(
                 `Cache not found for input keys: ${[
                     primaryKey,
                     ...restoreKeys
                 ].join(", ")}`
             );

             return;
         }

         // Store the matched cache key
         utils.setCacheState(cacheKey);

         const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
         utils.setCacheHitOutput(isExactKeyMatch);

+        if (
+            !isExactKeyMatch &&
+            core.getBooleanInput(Inputs.FailOnCacheMiss) == true
+        ) {
+            throw new Error(
+                `Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
+            );
+        }
         core.info(`Cache restored from key: ${cacheKey}`);
     } catch (error: unknown) {
         core.setFailed((error as Error).message);
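Worth noting how the two new `throw new Error(...)` sites actually fail the workflow: they are caught by the surrounding try/catch and routed through `core.setFailed`. A minimal sketch of that path; the names here are illustrative, not part of the diff.

import * as core from "@actions/core";

async function run(): Promise<void> {
    try {
        const cacheHit = false; // stand-in for the restoreCache result
        if (!cacheHit) {
            throw new Error("fail-on-cache-miss requirement is not met");
        }
    } catch (error: unknown) {
        // setFailed logs the message as an error annotation and sets
        // process.exitCode = 1, which fails the job step.
        core.setFailed((error as Error).message);
    }
}

run();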
20 src/save-only.ts Normal file
@@ -0,0 +1,20 @@
+import * as core from "@actions/core";
+
+import { Inputs } from "./constants";
+import save from "./save";
+import * as utils from "./utils/actionUtils";
+
+async function runSaveAction(): Promise<void> {
+    if (!core.getInput(Inputs.Key)) {
+        utils.logWarning(`Error retrieving key from inputs.`);
+        return;
+    }
+    saveOnly = true;
+
+    await save();
+}
+
+runSaveAction();
+
+export default runSaveAction;
+export let saveOnly: boolean;
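`save-only.ts` communicates with `save.ts` through the mutable exported binding `saveOnly`, and the two modules import each other; the cycle appears harmless because the flag is only read inside the shared routine at call time, after both modules have evaluated. A single-file sketch of the pattern, with illustrative names.

// A mutable exported binding acting as a process-wide flag.
export let saveOnly: boolean;

async function sharedSave(): Promise<void> {
    // Read the flag only when invoked, after the entry point has set it.
    const source = saveOnly === true ? "from-input" : "from-state";
    console.log(`resolving primary key ${source}`);
}

async function runSaveAction(): Promise<void> {
    saveOnly = true; // set before the shared routine ever reads it
    await sharedSave();
}

runSaveAction();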
@@ -2,6 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, Inputs, State } from "./constants";
+import { saveOnly } from "./save-only";
 import * as utils from "./utils/actionUtils";

 // Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
@@ -27,7 +28,11 @@ async function run(): Promise<void> {
         const state = utils.getCacheState();

         // Inputs are re-evaluted before the post action, so we want the original key used for restore
-        const primaryKey = core.getState(State.CachePrimaryKey);
+        const primaryKey =
+            saveOnly === true
+                ? core.getInput(Inputs.Key)
+                : core.getState(State.CachePrimaryKey);

         if (!primaryKey) {
             utils.logWarning(`Error retrieving key from state.`);
             return;
@@ -56,6 +61,4 @@ async function run(): Promise<void> {
     }
 }

-run();
-
 export default run;
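The ternary above exists because `core.getState` only has something to return when a main (restore) step previously called `core.saveState`; in save-only mode there is no such step, so the key must be re-read from the inputs. A sketch of that hand-off, assuming stock `@actions/core` behavior; the state name is illustrative.

import * as core from "@actions/core";

const CACHE_PRIMARY_KEY = "CACHE_KEY"; // illustrative state name

// Runs in the main step's process: the runner persists this value.
export function mainStep(): void {
    core.saveState(CACHE_PRIMARY_KEY, "npm-deadbeef");
}

// Runs later, in a separate post step process: the runner re-injects the
// value as a STATE_ environment variable, which getState reads back.
export function postStep(saveOnly: boolean): string {
    return saveOnly ? core.getInput("key") : core.getState(CACHE_PRIMARY_KEY);
}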
@@ -77,20 +77,19 @@ export function getInputAsInt(
 }

 export function isCacheFeatureAvailable(): boolean {
-    if (cache.isFeatureAvailable()) {
-        return true;
-    }
-
+    if (!cache.isFeatureAvailable()) {
         if (isGhes()) {
             logWarning(
                 `Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.
 Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github Connect, please unretire the actions/cache namespace before upgrade (see https://docs.github.com/en/enterprise-server@3.5/admin/github-actions/managing-access-to-actions-from-githubcom/enabling-automatic-access-to-githubcom-actions-using-github-connect#automatic-retirement-of-namespaces-for-actions-accessed-on-githubcom)`
             );
-        return false;
-    }
-
+        } else {
             logWarning(
                 "An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."
             );
+        }
         return false;
+    }

+    return true;
 }
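The `isCacheFeatureAvailable` rewrite is a pure guard-clause inversion: same warnings, same return values, one failure block instead of two exit points. A sketch of the equivalence with stub predicates; all names here are placeholders.

const featureAvailable = (): boolean => false;
const onGhes = (): boolean => true;

// before: early return on the happy path, two separate failure exits
function beforeRefactor(): boolean {
    if (featureAvailable()) {
        return true;
    }
    if (onGhes()) {
        return false; // warned about GHES support
    }
    return false; // warned about a backend error
}

// after: a single failure block that chooses which warning to log
function afterRefactor(): boolean {
    if (!featureAvailable()) {
        console.warn(onGhes() ? "GHES support warning" : "backend error warning");
        return false;
    }
    return true;
}

console.log(beforeRefactor() === afterRefactor()); // true for any stubs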
@@ -13,18 +13,28 @@ interface CacheInput {
     path: string;
     key: string;
     restoreKeys?: string[];
+    failOnCacheMiss?: boolean;
+    saveOnAnyFailure?: boolean;
 }

 export function setInputs(input: CacheInput): void {
     setInput(Inputs.Path, input.path);
     setInput(Inputs.Key, input.key);
+    setInput(Inputs.SaveOnAnyFailure, "false");
+    setInput(Inputs.FailOnCacheMiss, "false");
     input.restoreKeys &&
         setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
+    input.failOnCacheMiss &&
+        setInput(Inputs.FailOnCacheMiss, String(input.failOnCacheMiss));
+    input.saveOnAnyFailure &&
+        setInput(Inputs.SaveOnAnyFailure, String(input.saveOnAnyFailure));
 }

 export function clearInputs(): void {
     delete process.env[getInputName(Inputs.Path)];
     delete process.env[getInputName(Inputs.Key)];
     delete process.env[getInputName(Inputs.RestoreKeys)];
+    delete process.env[getInputName(Inputs.FailOnCacheMiss)];
+    delete process.env[getInputName(Inputs.SaveOnAnyFailure)];
     delete process.env[getInputName(Inputs.UploadChunkSize)];
 }
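These test helpers work because `@actions/core` resolves `getInput(name)` from an `INPUT_`-prefixed environment variable (spaces become underscores, the name is upper-cased, hyphens survive). A sketch of that plumbing, assuming the stock lookup rule; `setTestInput` is a hypothetical stand-in for the file's own `setInput`.

import * as core from "@actions/core";

// Mirror the runner's convention: "fail-on-cache-miss" is read from
// process.env["INPUT_FAIL-ON-CACHE-MISS"].
function setTestInput(name: string, value: string): void {
    process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] = value;
}

setTestInput("fail-on-cache-miss", "true");
console.log(core.getBooleanInput("fail-on-cache-miss")); // true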