Mirror of https://github.com/actions/cache.git (synced 2025-06-26 20:31:15 +02:00)

Compare commits: bishal/rea ... tanuj077/c (42 commits)
Commit SHA1s (author and date columns were not captured):
0685539942
a92fb881ae
5f3ddebb2f
8a88690a20
6e2c6a5916
2c9fb32186
01d96636a0
9c5a42a7c9
b4ac56fa43
24f54d74c0
8d99052cbc
58a0402e7d
a172494938
f8717682fb
af1210e2a3
ab0e7714ce
fb4a5dce60
71334c58b2
888d454557
dddd7ce07c
abddc4dd44
921c58ee44
7f45813c72
0769f2e443
f8116c8945
5fe0b944ef
2a6cd14175
651d82954c
cfa1b7695d
fa8856311e
ef145dd134
69b8227b27
6babf202a4
5c79b3fd6c
3ff04605d3
db8d946d18
0e93819da5
9a6a4f6079
515d10b4fd
669e7536d9
29dbbce762
ea5981db97
.devcontainer/devcontainer.json (new file, 14 lines)
@@ -0,0 +1,14 @@
{
    "name": "Node.js & TypeScript",
    "image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye",
    // Features to add to the dev container. More info: https://containers.dev/implementors/features.
    // "features": {},
    // Use 'forwardPorts' to make a list of ports inside the container available locally.
    // "forwardPorts": [],
    // Use 'postCreateCommand' to run commands after the container is created.
    "postCreateCommand": "npm install && npm run build"
    // Configure tool-specific properties.
    // "customizations": {},
    // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
    // "remoteUser": "root"
}
.github/auto_assign.yml (vendored, deleted, 21 lines)
@@ -1,21 +0,0 @@
# Set to true to add reviewers to pull requests
addReviewers: true

# Set to true to add assignees to pull requests
addAssignees: false

# A list of reviewers to be added to pull requests (GitHub user name)
reviewers:
  - anuragc617
  - pallavx
  - pdotl
  - phantsure
  - kotewar
  - aparna-ravindra
  - tiwarishub
  - vsvipul
  - bishal-pdmsft

# A number of reviewers added to the pull request
# Set 0 to add all the reviewers (default: 0)
numberOfReviewers: 1
.github/workflows/add-reviewer-pr.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
name: Add Reviewer PR
on:
  pull_request_target:
    types: [opened]
jobs:
  run-action:
    runs-on: ubuntu-latest
    steps:
      - name: Get current oncall
        id: oncall
        run: |
          echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT

      - name: Request Review
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/pulls/${{ github.event.pull_request.number}}/requested_reviewers -d '{"reviewers":["${{steps.oncall.outputs.CURRENT}}"]}'

      - name: Add Assignee
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
.github/workflows/assign-issue.yml (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
name: Assign issue
on:
  issues:
    types: [opened]
jobs:
  run-action:
    runs-on: ubuntu-latest
    steps:
      - name: Get current oncall
        id: oncall
        run: |
          echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT

      - name: add_assignees
        run: |
          curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
.github/workflows/auto-assign-issues.yml (vendored, deleted, 15 lines)
@@ -1,15 +0,0 @@
name: Issue assignment

on:
  issues:
    types: [opened]

jobs:
  auto-assign:
    runs-on: ubuntu-latest
    steps:
      - name: 'Auto-assign issue'
        uses: pozil/auto-assign-issue@v1.4.0
        with:
          assignees: anuragc617,pallavx,pdotl,phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
          numOfAssignee: 1
.github/workflows/auto-assign.yml (vendored, deleted, 10 lines)
@@ -1,10 +0,0 @@
name: 'Auto Assign'
on:
  pull_request_target:
    types: [opened, ready_for_review]

jobs:
  add-reviews:
    runs-on: ubuntu-latest
    steps:
      - uses: kentaro-m/auto-assign-action@v1.2.1
README.md (13 lines changed)
@@ -37,7 +37,7 @@ If you are using this inside a container, a POSIX-compliant `tar` needs to be in
 * `restore-keys` - An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key.

 #### Environment Variables
-* `SEGMENT_DOWNLOAD_TIMEOUT_MINS` - Segment download timeout (in minutes, default `60`) to abort download of the segment if not completed in the defined number of minutes. [Read more](#cache-segment-restore-timeout)
+* `SEGMENT_DOWNLOAD_TIMEOUT_MINS` - Segment download timeout (in minutes, default `60`) to abort download of the segment if not completed in the defined number of minutes. [Read more](https://github.com/actions/cache/blob/main/workarounds.md#cache-segment-restore-timeout)

 ### Outputs

@@ -118,7 +118,7 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us

 A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions.

-For example, using the [`hashFiles`](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles) function allows you to create a new cache when dependencies change.
+For example, using the [`hashFiles`](https://docs.github.com/en/actions/learn-github-actions/expressions#hashfiles) function allows you to create a new cache when dependencies change.

 ```yaml
 - uses: actions/cache@v3
@@ -233,10 +233,11 @@ jobs:
 ## Known practices and workarounds
 Following are some of the known practices/workarounds which community has used to fulfill specific requirements. You may choose to use them if suits your use case. Note these are not necessarily the only or the recommended solution.

-- [Cache segment restore timeout](./workarounds.md#cache-segment-restore-timeout)
-- [Update a cache](./workarounds.md#update-a-cache)
-- [Use cache across feature branches](./workarounds.md#use-cache-across-feature-branches)
-- [Improving cache restore performance on Windows/Using cross-os caching](./workarounds.md#improving-cache-restore-performance-on-windows-using-cross-os-caching)
+- [Cache segment restore timeout](./tips-and-workarounds.md#cache-segment-restore-timeout)
+- [Update a cache](./tips-and-workarounds.md#update-a-cache)
+- [Use cache across feature branches](./tips-and-workarounds.md#use-cache-across-feature-branches)
+- [Improving cache restore performance on Windows/Using cross-os caching](./tips-and-workarounds.md#improving-cache-restore-performance-on-windows-using-cross-os-caching)
+- [Force deletion of caches overriding default cache eviction policy](./tips-and-workarounds.md#force-deletion-of-caches-overriding-default-cache-eviction-policy)

 #### Windows environment variables
 Please note that Windows environment variables (like `%LocalAppData%`) will NOT be expanded by this action. Instead, prefer using `~` in your paths which will expand to HOME directory. For example, instead of `%LocalAppData%`, use `~\AppData\Local`. For a list of supported default environment variables, see [this](https://docs.github.com/en/actions/learn-github-actions/environment-variables) page.
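The Windows note above translates directly into a cache step. The following is a minimal sketch of that guidance; the pip cache path and the key expression are illustrative assumptions, not values taken from the README.

```yaml
# Illustrative only: apply the guidance above by using `~` instead of %LocalAppData%.
# The concrete path and key below are assumptions, not values from the README.
- uses: actions/cache@v3
  with:
    path: ~\AppData\Local\pip\Cache    # rather than %LocalAppData%\pip\Cache
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
```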
__tests__/restore.test.ts
@@ -324,3 +324,113 @@ test("restore with cache found for restore key", async () => {
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("restore with enabling save on any failure feature", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKey = "node-";
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys: [restoreKey],
        saveOnAnyFailure: true
    });

    const debugMock = jest.spyOn(core, "debug");
    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(restoreKey);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);

    expect(debugMock).toHaveBeenCalledWith(
        `Exporting environment variable SAVE_CACHE_ON_ANY_FAILURE`
    );

    expect(infoMock).toHaveBeenCalledWith(
        `Input Variable SAVE_CACHE_ON_ANY_FAILURE is set to true, the cache will be saved despite of any failure in the build.`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("Fail restore when fail on cache miss is enabled and primary key not found", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKey = "node-";
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys: [restoreKey],
        failOnCacheMiss: true
    });

    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(undefined);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(0);

    expect(failedMock).toHaveBeenCalledWith(
        `Cache with the given input key ${key} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
    );
    expect(failedMock).toHaveBeenCalledTimes(1);
});

test("Fail restore when fail on cache miss is enabled and primary key doesn't match restored key", async () => {
    const path = "node_modules";
    const key = "node-test";
    const restoreKey = "node-";
    testUtils.setInputs({
        path: path,
        key,
        restoreKeys: [restoreKey],
        failOnCacheMiss: true
    });

    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
    const restoreCacheMock = jest
        .spyOn(cache, "restoreCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(restoreKey);
        });

    await run();

    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);

    expect(failedMock).toHaveBeenCalledWith(
        `Restored cache key doesn't match the given input key ${key}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
    );
    expect(failedMock).toHaveBeenCalledTimes(1);
});
__tests__/save-only.test.ts (new file, 165 lines)
@@ -0,0 +1,165 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, RefKey } from "../src/constants";
import run from "../src/save-only";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

jest.mock("@actions/core");
jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
        return jest.requireActual("@actions/core").getInput(name, options);
    });

    jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => {
        return jest.requireActual("../src/utils/actionUtils").getCacheState();
    });

    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
        (name, options) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .getInputAsArray(name, options);
        }
    );

    jest.spyOn(actionUtils, "getInputAsInt").mockImplementation(
        (name, options) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .getInputAsInt(name, options);
        }
    );

    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .isExactKeyMatch(key, cacheResult);
        }
    );

    jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.isValidEvent();
    });
});

beforeEach(() => {
    process.env[Events.Key] = Events.Push;
    process.env[RefKey] = "refs/heads/feature-branch";

    jest.spyOn(actionUtils, "isGhes").mockImplementation(() => false);
    jest.spyOn(actionUtils, "isCacheFeatureAvailable").mockImplementation(
        () => true
    );
});

afterEach(() => {
    testUtils.clearInputs();
    delete process.env[Events.Key];
    delete process.env[RefKey];
});

test("save cache when save-only is required", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";

    jest.spyOn(core, "getInput")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, {
        uploadChunkSize: 4000000
    });

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save when save on any failure is true", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const primaryKey = "Linux-node-";
    const inputPath = "node_modules";

    jest.spyOn(core, "getInput")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    testUtils.setInput(Inputs.Path, inputPath);
    testUtils.setInput(Inputs.UploadChunkSize, "4000000");
    testUtils.setInput(Inputs.SaveOnAnyFailure, "true");

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledTimes(0);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with no primary key in input outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
        })
        // Cache Key
        .mockImplementationOnce(() => {
            return "";
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledWith(
        `Error retrieving key from inputs.`
    );
    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(failedMock).toHaveBeenCalledTimes(0);
});
action.yml (10 lines changed)
@@ -14,6 +14,14 @@ inputs:
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'
     required: false
+  exit-on-cache-miss:
+    description: 'Fail the workflow if the cache is not found for the primary key'
+    required: false
+    default: false
+  save-on-any-failure:
+    description: 'Save cache (on cache miss) despite of any failure during the workflow run'
+    required: false
+    default: false
 outputs:
   cache-hit:
     description: 'A boolean value to indicate an exact match was found for the primary key'
@@ -21,7 +29,7 @@ runs:
   using: 'node16'
   main: 'dist/restore/index.js'
   post: 'dist/save/index.js'
-  post-if: 'success()'
+  post-if: (success() || (env.SAVE_CACHE_ON_ANY_FAILURE == 'yes'))
 branding:
   icon: 'archive'
   color: 'gray-dark'
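For context, a minimal workflow step exercising the two new inputs might look like the sketch below. This is not an excerpt from the repository: note that this revision of action.yml declares the input as `exit-on-cache-miss` while the TypeScript source reads `fail-on-cache-miss`, so the exact spelling may still change, and the path and key values are assumptions.

```yaml
# Sketch of assumed usage of the new inputs (names as declared in this action.yml).
- uses: actions/cache@v3
  with:
    path: node_modules
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: ${{ runner.os }}-node-
    save-on-any-failure: true   # exports SAVE_CACHE_ON_ANY_FAILURE for the post-if condition
    exit-on-cache-miss: false   # set to true to fail the job when the primary key is not found
```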
dist/restore/index.js (vendored, 184 lines changed)
@ -1892,10 +1892,10 @@ function serial(list, iterator, callback)
|
||||
module.exports = minimatch
|
||||
minimatch.Minimatch = Minimatch
|
||||
|
||||
var path = { sep: '/' }
|
||||
try {
|
||||
path = __webpack_require__(622)
|
||||
} catch (er) {}
|
||||
var path = (function () { try { return __webpack_require__(622) } catch (e) {}}()) || {
|
||||
sep: '/'
|
||||
}
|
||||
minimatch.sep = path.sep
|
||||
|
||||
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
||||
var expand = __webpack_require__(306)
|
||||
@ -1947,43 +1947,64 @@ function filter (pattern, options) {
|
||||
}
|
||||
|
||||
function ext (a, b) {
|
||||
a = a || {}
|
||||
b = b || {}
|
||||
var t = {}
|
||||
Object.keys(b).forEach(function (k) {
|
||||
t[k] = b[k]
|
||||
})
|
||||
Object.keys(a).forEach(function (k) {
|
||||
t[k] = a[k]
|
||||
})
|
||||
Object.keys(b).forEach(function (k) {
|
||||
t[k] = b[k]
|
||||
})
|
||||
return t
|
||||
}
|
||||
|
||||
minimatch.defaults = function (def) {
|
||||
if (!def || !Object.keys(def).length) return minimatch
|
||||
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
|
||||
return minimatch
|
||||
}
|
||||
|
||||
var orig = minimatch
|
||||
|
||||
var m = function minimatch (p, pattern, options) {
|
||||
return orig.minimatch(p, pattern, ext(def, options))
|
||||
return orig(p, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.Minimatch = function Minimatch (pattern, options) {
|
||||
return new orig.Minimatch(pattern, ext(def, options))
|
||||
}
|
||||
m.Minimatch.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options)).Minimatch
|
||||
}
|
||||
|
||||
m.filter = function filter (pattern, options) {
|
||||
return orig.filter(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options))
|
||||
}
|
||||
|
||||
m.makeRe = function makeRe (pattern, options) {
|
||||
return orig.makeRe(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.braceExpand = function braceExpand (pattern, options) {
|
||||
return orig.braceExpand(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.match = function (list, pattern, options) {
|
||||
return orig.match(list, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
Minimatch.defaults = function (def) {
|
||||
if (!def || !Object.keys(def).length) return Minimatch
|
||||
return minimatch.defaults(def).Minimatch
|
||||
}
|
||||
|
||||
function minimatch (p, pattern, options) {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('glob pattern string required')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
@ -1992,9 +2013,6 @@ function minimatch (p, pattern, options) {
|
||||
return false
|
||||
}
|
||||
|
||||
// "" only matches ""
|
||||
if (pattern.trim() === '') return p === ''
|
||||
|
||||
return new Minimatch(pattern, options).match(p)
|
||||
}
|
||||
|
||||
@ -2003,15 +2021,14 @@ function Minimatch (pattern, options) {
|
||||
return new Minimatch(pattern, options)
|
||||
}
|
||||
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('glob pattern string required')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
pattern = pattern.trim()
|
||||
|
||||
// windows support: need to use /, not \
|
||||
if (path.sep !== '/') {
|
||||
if (!options.allowWindowsEscape && path.sep !== '/') {
|
||||
pattern = pattern.split(path.sep).join('/')
|
||||
}
|
||||
|
||||
@ -2022,6 +2039,7 @@ function Minimatch (pattern, options) {
|
||||
this.negate = false
|
||||
this.comment = false
|
||||
this.empty = false
|
||||
this.partial = !!options.partial
|
||||
|
||||
// make the set of regexps etc.
|
||||
this.make()
|
||||
@ -2031,9 +2049,6 @@ Minimatch.prototype.debug = function () {}
|
||||
|
||||
Minimatch.prototype.make = make
|
||||
function make () {
|
||||
// don't do it more than once.
|
||||
if (this._made) return
|
||||
|
||||
var pattern = this.pattern
|
||||
var options = this.options
|
||||
|
||||
@ -2053,7 +2068,7 @@ function make () {
|
||||
// step 2: expand braces
|
||||
var set = this.globSet = this.braceExpand()
|
||||
|
||||
if (options.debug) this.debug = console.error
|
||||
if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
|
||||
|
||||
this.debug(this.pattern, set)
|
||||
|
||||
@ -2133,12 +2148,11 @@ function braceExpand (pattern, options) {
|
||||
pattern = typeof pattern === 'undefined'
|
||||
? this.pattern : pattern
|
||||
|
||||
if (typeof pattern === 'undefined') {
|
||||
throw new TypeError('undefined pattern')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (options.nobrace ||
|
||||
!pattern.match(/\{.*\}/)) {
|
||||
// Thanks to Yeting Li <https://github.com/yetingli> for
|
||||
// improving this regexp to avoid a ReDOS vulnerability.
|
||||
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
||||
// shortcut. no need to expand.
|
||||
return [pattern]
|
||||
}
|
||||
@ -2146,6 +2160,17 @@ function braceExpand (pattern, options) {
|
||||
return expand(pattern)
|
||||
}
|
||||
|
||||
var MAX_PATTERN_LENGTH = 1024 * 64
|
||||
var assertValidPattern = function (pattern) {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('invalid pattern')
|
||||
}
|
||||
|
||||
if (pattern.length > MAX_PATTERN_LENGTH) {
|
||||
throw new TypeError('pattern is too long')
|
||||
}
|
||||
}
|
||||
|
||||
// parse a component of the expanded set.
|
||||
// At this point, no pattern may contain "/" in it
|
||||
// so we're going to return a 2d array, where each entry is the full
|
||||
@ -2160,14 +2185,17 @@ function braceExpand (pattern, options) {
|
||||
Minimatch.prototype.parse = parse
|
||||
var SUBPARSE = {}
|
||||
function parse (pattern, isSub) {
|
||||
if (pattern.length > 1024 * 64) {
|
||||
throw new TypeError('pattern is too long')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
var options = this.options
|
||||
|
||||
// shortcuts
|
||||
if (!options.noglobstar && pattern === '**') return GLOBSTAR
|
||||
if (pattern === '**') {
|
||||
if (!options.noglobstar)
|
||||
return GLOBSTAR
|
||||
else
|
||||
pattern = '*'
|
||||
}
|
||||
if (pattern === '') return ''
|
||||
|
||||
var re = ''
|
||||
@ -2223,10 +2251,12 @@ function parse (pattern, isSub) {
|
||||
}
|
||||
|
||||
switch (c) {
|
||||
case '/':
|
||||
/* istanbul ignore next */
|
||||
case '/': {
|
||||
// completely not allowed, even escaped.
|
||||
// Should already be path-split by now.
|
||||
return false
|
||||
}
|
||||
|
||||
case '\\':
|
||||
clearStateChar()
|
||||
@ -2345,25 +2375,23 @@ function parse (pattern, isSub) {
|
||||
|
||||
// handle the case where we left a class open.
|
||||
// "[z-a]" is valid, equivalent to "\[z-a\]"
|
||||
if (inClass) {
|
||||
// split where the last [ was, make sure we don't have
|
||||
// an invalid re. if so, re-walk the contents of the
|
||||
// would-be class to re-translate any characters that
|
||||
// were passed through as-is
|
||||
// TODO: It would probably be faster to determine this
|
||||
// without a try/catch and a new RegExp, but it's tricky
|
||||
// to do safely. For now, this is safe and works.
|
||||
var cs = pattern.substring(classStart + 1, i)
|
||||
try {
|
||||
RegExp('[' + cs + ']')
|
||||
} catch (er) {
|
||||
// not a valid class!
|
||||
var sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||||
hasMagic = hasMagic || sp[1]
|
||||
inClass = false
|
||||
continue
|
||||
}
|
||||
// split where the last [ was, make sure we don't have
|
||||
// an invalid re. if so, re-walk the contents of the
|
||||
// would-be class to re-translate any characters that
|
||||
// were passed through as-is
|
||||
// TODO: It would probably be faster to determine this
|
||||
// without a try/catch and a new RegExp, but it's tricky
|
||||
// to do safely. For now, this is safe and works.
|
||||
var cs = pattern.substring(classStart + 1, i)
|
||||
try {
|
||||
RegExp('[' + cs + ']')
|
||||
} catch (er) {
|
||||
// not a valid class!
|
||||
var sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||||
hasMagic = hasMagic || sp[1]
|
||||
inClass = false
|
||||
continue
|
||||
}
|
||||
|
||||
// finish up the class.
|
||||
@ -2447,9 +2475,7 @@ function parse (pattern, isSub) {
|
||||
// something that could conceivably capture a dot
|
||||
var addPatternStart = false
|
||||
switch (re.charAt(0)) {
|
||||
case '.':
|
||||
case '[':
|
||||
case '(': addPatternStart = true
|
||||
case '[': case '.': case '(': addPatternStart = true
|
||||
}
|
||||
|
||||
// Hack to work around lack of negative lookbehind in JS
|
||||
@ -2511,7 +2537,7 @@ function parse (pattern, isSub) {
|
||||
var flags = options.nocase ? 'i' : ''
|
||||
try {
|
||||
var regExp = new RegExp('^' + re + '$', flags)
|
||||
} catch (er) {
|
||||
} catch (er) /* istanbul ignore next - should be impossible */ {
|
||||
// If it was an invalid regular expression, then it can't match
|
||||
// anything. This trick looks for a character after the end of
|
||||
// the string, which is of course impossible, except in multi-line
|
||||
@ -2569,7 +2595,7 @@ function makeRe () {
|
||||
|
||||
try {
|
||||
this.regexp = new RegExp(re, flags)
|
||||
} catch (ex) {
|
||||
} catch (ex) /* istanbul ignore next - should be impossible */ {
|
||||
this.regexp = false
|
||||
}
|
||||
return this.regexp
|
||||
@ -2587,8 +2613,8 @@ minimatch.match = function (list, pattern, options) {
|
||||
return list
|
||||
}
|
||||
|
||||
Minimatch.prototype.match = match
|
||||
function match (f, partial) {
|
||||
Minimatch.prototype.match = function match (f, partial) {
|
||||
if (typeof partial === 'undefined') partial = this.partial
|
||||
this.debug('match', f, this.pattern)
|
||||
// short-circuit in the case of busted things.
|
||||
// comments, etc.
|
||||
@ -2670,6 +2696,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
|
||||
// should be impossible.
|
||||
// some invalid regexp stuff in the set.
|
||||
/* istanbul ignore if */
|
||||
if (p === false) return false
|
||||
|
||||
if (p === GLOBSTAR) {
|
||||
@ -2743,6 +2770,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
// no match was found.
|
||||
// However, in partial mode, we can't say this is necessarily over.
|
||||
// If there's more *pattern* left, then
|
||||
/* istanbul ignore if */
|
||||
if (partial) {
|
||||
// ran out of file
|
||||
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
|
||||
@ -2756,11 +2784,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
// patterns with magic have been turned into regexps.
|
||||
var hit
|
||||
if (typeof p === 'string') {
|
||||
if (options.nocase) {
|
||||
hit = f.toLowerCase() === p.toLowerCase()
|
||||
} else {
|
||||
hit = f === p
|
||||
}
|
||||
hit = f === p
|
||||
this.debug('string match', p, f, hit)
|
||||
} else {
|
||||
hit = f.match(p)
|
||||
@ -2791,16 +2815,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
// this is ok if we're doing the match as part of
|
||||
// a glob fs traversal.
|
||||
return partial
|
||||
} else if (pi === pl) {
|
||||
} else /* istanbul ignore else */ if (pi === pl) {
|
||||
// ran out of pattern, still have file left.
|
||||
// this is only acceptable if we're on the very last
|
||||
// empty segment of a file with a trailing slash.
|
||||
// a/* should match a/b/
|
||||
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
|
||||
return emptyFileEnd
|
||||
return (fi === fl - 1) && (file[fi] === '')
|
||||
}
|
||||
|
||||
// should be unreachable.
|
||||
/* istanbul ignore next */
|
||||
throw new Error('wtf?')
|
||||
}
|
||||
|
||||
@ -4940,13 +4964,15 @@ exports.checkBypass = checkBypass;
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RefKey = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
|
||||
exports.RefKey = exports.Variables = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
|
||||
var Inputs;
|
||||
(function (Inputs) {
|
||||
Inputs["Key"] = "key";
|
||||
Inputs["Path"] = "path";
|
||||
Inputs["RestoreKeys"] = "restore-keys";
|
||||
Inputs["UploadChunkSize"] = "upload-chunk-size";
|
||||
Inputs["FailOnCacheMiss"] = "fail-on-cache-miss";
|
||||
Inputs["SaveOnAnyFailure"] = "save-on-any-failure";
|
||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||
var Outputs;
|
||||
(function (Outputs) {
|
||||
@ -4963,6 +4989,10 @@ var Events;
|
||||
Events["Push"] = "push";
|
||||
Events["PullRequest"] = "pull_request";
|
||||
})(Events = exports.Events || (exports.Events = {}));
|
||||
var Variables;
|
||||
(function (Variables) {
|
||||
Variables["SaveCacheOnAnyFailure"] = "SAVE_CACHE_ON_ANY_FAILURE";
|
||||
})(Variables = exports.Variables || (exports.Variables = {}));
|
||||
exports.RefKey = "GITHUB_REF";
|
||||
|
||||
|
||||
@ -48984,7 +49014,17 @@ function run() {
|
||||
required: true
|
||||
});
|
||||
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
|
||||
//Check if user wants to save cache despite of failure in any previous job
|
||||
const saveCache = core.getBooleanInput(constants_1.Inputs.SaveOnAnyFailure);
|
||||
if (saveCache == true) {
|
||||
core.debug(`Exporting environment variable ${constants_1.Variables.SaveCacheOnAnyFailure}`);
|
||||
core.exportVariable(constants_1.Variables.SaveCacheOnAnyFailure, saveCache);
|
||||
core.info(`Input Variable ${constants_1.Variables.SaveCacheOnAnyFailure} is set to true, the cache will be saved despite of any failure in the build.`);
|
||||
}
|
||||
if (!cacheKey) {
|
||||
if (core.getBooleanInput(constants_1.Inputs.FailOnCacheMiss) == true) {
|
||||
throw new Error(`Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`);
|
||||
}
|
||||
core.info(`Cache not found for input keys: ${[
|
||||
primaryKey,
|
||||
...restoreKeys
|
||||
@ -48995,6 +49035,10 @@ function run() {
|
||||
utils.setCacheState(cacheKey);
|
||||
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
|
||||
utils.setCacheHitOutput(isExactKeyMatch);
|
||||
if (!isExactKeyMatch &&
|
||||
core.getBooleanInput(constants_1.Inputs.FailOnCacheMiss) == true) {
|
||||
throw new Error(`Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`);
|
||||
}
|
||||
core.info(`Cache restored from key: ${cacheKey}`);
|
||||
}
|
||||
catch (error) {
|
||||
|
dist/save-only/index.js (vendored, new file, 61367 lines): file diff suppressed because one or more lines are too long.
dist/save/index.js (vendored, 238 lines changed)
@ -1892,10 +1892,10 @@ function serial(list, iterator, callback)
|
||||
module.exports = minimatch
|
||||
minimatch.Minimatch = Minimatch
|
||||
|
||||
var path = { sep: '/' }
|
||||
try {
|
||||
path = __webpack_require__(622)
|
||||
} catch (er) {}
|
||||
var path = (function () { try { return __webpack_require__(622) } catch (e) {}}()) || {
|
||||
sep: '/'
|
||||
}
|
||||
minimatch.sep = path.sep
|
||||
|
||||
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
||||
var expand = __webpack_require__(306)
|
||||
@ -1947,43 +1947,64 @@ function filter (pattern, options) {
|
||||
}
|
||||
|
||||
function ext (a, b) {
|
||||
a = a || {}
|
||||
b = b || {}
|
||||
var t = {}
|
||||
Object.keys(b).forEach(function (k) {
|
||||
t[k] = b[k]
|
||||
})
|
||||
Object.keys(a).forEach(function (k) {
|
||||
t[k] = a[k]
|
||||
})
|
||||
Object.keys(b).forEach(function (k) {
|
||||
t[k] = b[k]
|
||||
})
|
||||
return t
|
||||
}
|
||||
|
||||
minimatch.defaults = function (def) {
|
||||
if (!def || !Object.keys(def).length) return minimatch
|
||||
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
|
||||
return minimatch
|
||||
}
|
||||
|
||||
var orig = minimatch
|
||||
|
||||
var m = function minimatch (p, pattern, options) {
|
||||
return orig.minimatch(p, pattern, ext(def, options))
|
||||
return orig(p, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.Minimatch = function Minimatch (pattern, options) {
|
||||
return new orig.Minimatch(pattern, ext(def, options))
|
||||
}
|
||||
m.Minimatch.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options)).Minimatch
|
||||
}
|
||||
|
||||
m.filter = function filter (pattern, options) {
|
||||
return orig.filter(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options))
|
||||
}
|
||||
|
||||
m.makeRe = function makeRe (pattern, options) {
|
||||
return orig.makeRe(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.braceExpand = function braceExpand (pattern, options) {
|
||||
return orig.braceExpand(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.match = function (list, pattern, options) {
|
||||
return orig.match(list, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
Minimatch.defaults = function (def) {
|
||||
if (!def || !Object.keys(def).length) return Minimatch
|
||||
return minimatch.defaults(def).Minimatch
|
||||
}
|
||||
|
||||
function minimatch (p, pattern, options) {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('glob pattern string required')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
@ -1992,9 +2013,6 @@ function minimatch (p, pattern, options) {
|
||||
return false
|
||||
}
|
||||
|
||||
// "" only matches ""
|
||||
if (pattern.trim() === '') return p === ''
|
||||
|
||||
return new Minimatch(pattern, options).match(p)
|
||||
}
|
||||
|
||||
@ -2003,15 +2021,14 @@ function Minimatch (pattern, options) {
|
||||
return new Minimatch(pattern, options)
|
||||
}
|
||||
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('glob pattern string required')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
pattern = pattern.trim()
|
||||
|
||||
// windows support: need to use /, not \
|
||||
if (path.sep !== '/') {
|
||||
if (!options.allowWindowsEscape && path.sep !== '/') {
|
||||
pattern = pattern.split(path.sep).join('/')
|
||||
}
|
||||
|
||||
@ -2022,6 +2039,7 @@ function Minimatch (pattern, options) {
|
||||
this.negate = false
|
||||
this.comment = false
|
||||
this.empty = false
|
||||
this.partial = !!options.partial
|
||||
|
||||
// make the set of regexps etc.
|
||||
this.make()
|
||||
@ -2031,9 +2049,6 @@ Minimatch.prototype.debug = function () {}
|
||||
|
||||
Minimatch.prototype.make = make
|
||||
function make () {
|
||||
// don't do it more than once.
|
||||
if (this._made) return
|
||||
|
||||
var pattern = this.pattern
|
||||
var options = this.options
|
||||
|
||||
@ -2053,7 +2068,7 @@ function make () {
|
||||
// step 2: expand braces
|
||||
var set = this.globSet = this.braceExpand()
|
||||
|
||||
if (options.debug) this.debug = console.error
|
||||
if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
|
||||
|
||||
this.debug(this.pattern, set)
|
||||
|
||||
@ -2133,12 +2148,11 @@ function braceExpand (pattern, options) {
|
||||
pattern = typeof pattern === 'undefined'
|
||||
? this.pattern : pattern
|
||||
|
||||
if (typeof pattern === 'undefined') {
|
||||
throw new TypeError('undefined pattern')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (options.nobrace ||
|
||||
!pattern.match(/\{.*\}/)) {
|
||||
// Thanks to Yeting Li <https://github.com/yetingli> for
|
||||
// improving this regexp to avoid a ReDOS vulnerability.
|
||||
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
||||
// shortcut. no need to expand.
|
||||
return [pattern]
|
||||
}
|
||||
@ -2146,6 +2160,17 @@ function braceExpand (pattern, options) {
|
||||
return expand(pattern)
|
||||
}
|
||||
|
||||
var MAX_PATTERN_LENGTH = 1024 * 64
|
||||
var assertValidPattern = function (pattern) {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('invalid pattern')
|
||||
}
|
||||
|
||||
if (pattern.length > MAX_PATTERN_LENGTH) {
|
||||
throw new TypeError('pattern is too long')
|
||||
}
|
||||
}
|
||||
|
||||
// parse a component of the expanded set.
|
||||
// At this point, no pattern may contain "/" in it
|
||||
// so we're going to return a 2d array, where each entry is the full
|
||||
@ -2160,14 +2185,17 @@ function braceExpand (pattern, options) {
|
||||
Minimatch.prototype.parse = parse
|
||||
var SUBPARSE = {}
|
||||
function parse (pattern, isSub) {
|
||||
if (pattern.length > 1024 * 64) {
|
||||
throw new TypeError('pattern is too long')
|
||||
}
|
||||
assertValidPattern(pattern)
|
||||
|
||||
var options = this.options
|
||||
|
||||
// shortcuts
|
||||
if (!options.noglobstar && pattern === '**') return GLOBSTAR
|
||||
if (pattern === '**') {
|
||||
if (!options.noglobstar)
|
||||
return GLOBSTAR
|
||||
else
|
||||
pattern = '*'
|
||||
}
|
||||
if (pattern === '') return ''
|
||||
|
||||
var re = ''
|
||||
@ -2223,10 +2251,12 @@ function parse (pattern, isSub) {
|
||||
}
|
||||
|
||||
switch (c) {
|
||||
case '/':
|
||||
/* istanbul ignore next */
|
||||
case '/': {
|
||||
// completely not allowed, even escaped.
|
||||
// Should already be path-split by now.
|
||||
return false
|
||||
}
|
||||
|
||||
case '\\':
|
||||
clearStateChar()
|
||||
@ -2345,25 +2375,23 @@ function parse (pattern, isSub) {
|
||||
|
||||
// handle the case where we left a class open.
|
||||
// "[z-a]" is valid, equivalent to "\[z-a\]"
|
||||
if (inClass) {
|
||||
// split where the last [ was, make sure we don't have
|
||||
// an invalid re. if so, re-walk the contents of the
|
||||
// would-be class to re-translate any characters that
|
||||
// were passed through as-is
|
||||
// TODO: It would probably be faster to determine this
|
||||
// without a try/catch and a new RegExp, but it's tricky
|
||||
// to do safely. For now, this is safe and works.
|
||||
var cs = pattern.substring(classStart + 1, i)
|
||||
try {
|
||||
RegExp('[' + cs + ']')
|
||||
} catch (er) {
|
||||
// not a valid class!
|
||||
var sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||||
hasMagic = hasMagic || sp[1]
|
||||
inClass = false
|
||||
continue
|
||||
}
|
||||
// split where the last [ was, make sure we don't have
|
||||
// an invalid re. if so, re-walk the contents of the
|
||||
// would-be class to re-translate any characters that
|
||||
// were passed through as-is
|
||||
// TODO: It would probably be faster to determine this
|
||||
// without a try/catch and a new RegExp, but it's tricky
|
||||
// to do safely. For now, this is safe and works.
|
||||
var cs = pattern.substring(classStart + 1, i)
|
||||
try {
|
||||
RegExp('[' + cs + ']')
|
||||
} catch (er) {
|
||||
// not a valid class!
|
||||
var sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||||
hasMagic = hasMagic || sp[1]
|
||||
inClass = false
|
||||
continue
|
||||
}
|
||||
|
||||
// finish up the class.
|
||||
@ -2447,9 +2475,7 @@ function parse (pattern, isSub) {
|
||||
// something that could conceivably capture a dot
|
||||
var addPatternStart = false
|
||||
switch (re.charAt(0)) {
|
||||
case '.':
|
||||
case '[':
|
||||
case '(': addPatternStart = true
|
||||
case '[': case '.': case '(': addPatternStart = true
|
||||
}
|
||||
|
||||
// Hack to work around lack of negative lookbehind in JS
|
||||
@ -2511,7 +2537,7 @@ function parse (pattern, isSub) {
|
||||
var flags = options.nocase ? 'i' : ''
|
||||
try {
|
||||
var regExp = new RegExp('^' + re + '$', flags)
|
||||
} catch (er) {
|
||||
} catch (er) /* istanbul ignore next - should be impossible */ {
|
||||
// If it was an invalid regular expression, then it can't match
|
||||
// anything. This trick looks for a character after the end of
|
||||
// the string, which is of course impossible, except in multi-line
|
||||
@ -2569,7 +2595,7 @@ function makeRe () {
|
||||
|
||||
try {
|
||||
this.regexp = new RegExp(re, flags)
|
||||
} catch (ex) {
|
||||
} catch (ex) /* istanbul ignore next - should be impossible */ {
|
||||
this.regexp = false
|
||||
}
|
||||
return this.regexp
|
||||
@ -2587,8 +2613,8 @@ minimatch.match = function (list, pattern, options) {
|
||||
return list
|
||||
}
|
||||
|
||||
Minimatch.prototype.match = match
|
||||
function match (f, partial) {
|
||||
Minimatch.prototype.match = function match (f, partial) {
|
||||
if (typeof partial === 'undefined') partial = this.partial
|
||||
this.debug('match', f, this.pattern)
|
||||
// short-circuit in the case of busted things.
|
||||
// comments, etc.
|
||||
@ -2670,6 +2696,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
|
||||
// should be impossible.
|
||||
// some invalid regexp stuff in the set.
|
||||
/* istanbul ignore if */
|
||||
if (p === false) return false
|
||||
|
||||
if (p === GLOBSTAR) {
|
||||
@ -2743,6 +2770,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
// no match was found.
|
||||
// However, in partial mode, we can't say this is necessarily over.
|
||||
// If there's more *pattern* left, then
|
||||
/* istanbul ignore if */
|
||||
if (partial) {
|
||||
// ran out of file
|
||||
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
|
||||
@ -2756,11 +2784,7 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
// patterns with magic have been turned into regexps.
|
||||
var hit
|
||||
if (typeof p === 'string') {
|
||||
if (options.nocase) {
|
||||
hit = f.toLowerCase() === p.toLowerCase()
|
||||
} else {
|
||||
hit = f === p
|
||||
}
|
||||
hit = f === p
|
||||
this.debug('string match', p, f, hit)
|
||||
} else {
|
||||
hit = f.match(p)
|
||||
@ -2791,16 +2815,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
// this is ok if we're doing the match as part of
|
||||
// a glob fs traversal.
|
||||
return partial
|
||||
} else if (pi === pl) {
|
||||
} else /* istanbul ignore else */ if (pi === pl) {
|
||||
// ran out of pattern, still have file left.
|
||||
// this is only acceptable if we're on the very last
|
||||
// empty segment of a file with a trailing slash.
|
||||
// a/* should match a/b/
|
||||
var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
|
||||
return emptyFileEnd
|
||||
return (fi === fl - 1) && (file[fi] === '')
|
||||
}
|
||||
|
||||
// should be unreachable.
|
||||
/* istanbul ignore next */
|
||||
throw new Error('wtf?')
|
||||
}
|
||||
|
||||
@ -4940,13 +4964,15 @@ exports.checkBypass = checkBypass;
|
||||
"use strict";
|
||||
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RefKey = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
|
||||
exports.RefKey = exports.Variables = exports.Events = exports.State = exports.Outputs = exports.Inputs = void 0;
|
||||
var Inputs;
|
||||
(function (Inputs) {
|
||||
Inputs["Key"] = "key";
|
||||
Inputs["Path"] = "path";
|
||||
Inputs["RestoreKeys"] = "restore-keys";
|
||||
Inputs["UploadChunkSize"] = "upload-chunk-size";
|
||||
Inputs["FailOnCacheMiss"] = "fail-on-cache-miss";
|
||||
Inputs["SaveOnAnyFailure"] = "save-on-any-failure";
|
||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||
var Outputs;
|
||||
(function (Outputs) {
|
||||
@ -4963,6 +4989,10 @@ var Events;
|
||||
Events["Push"] = "push";
|
||||
Events["PullRequest"] = "pull_request";
|
||||
})(Events = exports.Events || (exports.Events = {}));
|
||||
var Variables;
|
||||
(function (Variables) {
|
||||
Variables["SaveCacheOnAnyFailure"] = "SAVE_CACHE_ON_ANY_FAILURE";
|
||||
})(Variables = exports.Variables || (exports.Variables = {}));
|
||||
exports.RefKey = "GITHUB_REF";
|
||||
|
||||
|
||||
@ -47281,6 +47311,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cache = __importStar(__webpack_require__(692));
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
const save_only_1 = __webpack_require__(973);
|
||||
const utils = __importStar(__webpack_require__(443));
|
||||
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
|
||||
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
|
||||
@ -47298,7 +47329,9 @@ function run() {
|
||||
}
|
||||
const state = utils.getCacheState();
|
||||
// Inputs are re-evaluted before the post action, so we want the original key used for restore
|
||||
const primaryKey = core.getState(constants_1.State.CachePrimaryKey);
|
||||
const primaryKey = save_only_1.saveOnly === true
|
||||
? core.getInput(constants_1.Inputs.Key)
|
||||
: core.getState(constants_1.State.CachePrimaryKey);
|
||||
if (!primaryKey) {
|
||||
utils.logWarning(`Error retrieving key from state.`);
|
||||
return;
|
||||
@ -47322,7 +47355,6 @@ function run() {
|
||||
}
|
||||
});
|
||||
}
|
||||
run();
|
||||
exports.default = run;
|
||||
|
||||
|
||||
@ -55192,7 +55224,67 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
||||
//# sourceMappingURL=internal-path-helper.js.map
|
||||
|
||||
/***/ }),
|
||||
/* 973 */,
|
||||
/* 973 */
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.saveOnly = void 0;
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const constants_1 = __webpack_require__(196);
|
||||
const save_1 = __importDefault(__webpack_require__(681));
|
||||
const utils = __importStar(__webpack_require__(443));
|
||||
function runSaveAction() {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
if (!core.getInput(constants_1.Inputs.Key)) {
|
||||
utils.logWarning(`Error retrieving key from inputs.`);
|
||||
return;
|
||||
}
|
||||
exports.saveOnly = true;
|
||||
yield (0, save_1.default)();
|
||||
});
|
||||
}
|
||||
runSaveAction();
|
||||
exports.default = runSaveAction;
|
||||
|
||||
|
||||
/***/ }),
|
||||
/* 974 */,
|
||||
/* 975 */
|
||||
/***/ (function(__unusedmodule, exports) {
|
||||
|
package-lock.json (generated, 33 lines changed)
@@ -4055,18 +4055,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/eslint-plugin-import/node_modules/minimatch": {
-      "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
-      "dev": true,
-      "dependencies": {
-        "brace-expansion": "^1.1.7"
-      },
-      "engines": {
-        "node": "*"
-      }
-    },
     "node_modules/eslint-plugin-import/node_modules/ms": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
@@ -8129,9 +8117,9 @@
       }
     },
     "node_modules/minimatch": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -12754,15 +12742,6 @@
         "has": "^1.0.3"
       }
     },
-    "minimatch": {
-      "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
-      "dev": true,
-      "requires": {
-        "brace-expansion": "^1.1.7"
-      }
-    },
     "ms": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
@@ -15888,9 +15867,9 @@
       "dev": true
     },
     "minimatch": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "requires": {
         "brace-expansion": "^1.1.7"
       }
package.json
@@ -5,7 +5,7 @@
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
   "scripts": {
-    "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
+    "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && ncc build -o dist/save-only src/save-only.ts",
     "test": "tsc --noEmit && jest --coverage",
     "lint": "eslint **/*.ts --cache",
    "format": "prettier --write **/*.ts",
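The updated build script adds a third `ncc` invocation, so a full build now emits a save-only bundle alongside the restore and save bundles. A hypothetical verification step (the step name and file check are assumptions, not part of this PR) could look like:

```yaml
# Hypothetical CI step: the updated `npm run build` should now also emit dist/save-only/index.js.
- name: Build action bundles
  run: |
    npm ci
    npm run build
    test -f dist/save-only/index.js   # new bundle produced by the added ncc invocation
```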
restore/action.yml (new file, 27 lines)
@@ -0,0 +1,27 @@
name: 'Restore Cache'
description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache and restore'
    required: true
  key:
    description: 'An explicit key for restoring and saving the cache'
    required: true
  restore-keys:
    description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
    required: false
  exit-on-cache-miss:
    description: 'Fail the workflow if the cache is not found for the primary key'
    required: false
    default: false
outputs:
  cache-hit:
    description: 'A boolean value to indicate an exact match was found for the primary key'
runs:
  using: 'node16'
  main: '../dist/restore/index.js'
branding:
  icon: 'archive'
  color: 'gray-dark'
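A minimal sketch of invoking this restore-only action from a workflow follows. Referencing it as `actions/cache/restore@v3` (a subdirectory action) and the path/key values are assumptions; the PR itself does not show a usage example.

```yaml
# Sketch, assuming the sub-action is addressed via its repository subdirectory.
- uses: actions/cache/restore@v3
  with:
    path: node_modules
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: ${{ runner.os }}-node-
```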
19
save/action.yml
Normal file
@ -0,0 +1,19 @@
name: 'Save Cache'
description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache and restore'
    required: true
  key:
    description: 'An explicit key for restoring and saving the cache'
    required: true
  upload-chunk-size:
    description: 'The chunk size used to split up large files during upload, in bytes'
    required: false
runs:
  using: 'node16'
  main: '../dist/save/index.js'
branding:
  icon: 'archive'
  color: 'gray-dark'
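
Likewise, a sketch of how the standalone save action might be used at the end of a job; the `actions/cache/save@v3` reference and values are assumptions for illustration only:

```yaml
# Hypothetical usage of the standalone save action defined above;
# the action reference and cache values are placeholders.
- name: Save npm cache
  uses: actions/cache/save@v3
  with:
    path: ~/.npm
    key: npm-${{ hashFiles('**/package-lock.json') }}
```
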
@ -2,7 +2,9 @@ export enum Inputs {
    Key = "key",
    Path = "path",
    RestoreKeys = "restore-keys",
    UploadChunkSize = "upload-chunk-size"
    UploadChunkSize = "upload-chunk-size",
    FailOnCacheMiss = "fail-on-cache-miss",
    SaveOnAnyFailure = "save-on-any-failure"
}

export enum Outputs {
@ -20,4 +22,8 @@ export enum Events {
    PullRequest = "pull_request"
}

export enum Variables {
    SaveCacheOnAnyFailure = "SAVE_CACHE_ON_ANY_FAILURE"
}

export const RefKey = "GITHUB_REF";
@ -1,7 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, State } from "./constants";
import { Events, Inputs, State, Variables } from "./constants";
import * as utils from "./utils/actionUtils";

async function run(): Promise<void> {
@ -35,22 +35,46 @@ async function run(): Promise<void> {
            restoreKeys
        );

        //Check if user wants to save cache despite of failure in any previous job
        const saveCache = core.getBooleanInput(Inputs.SaveOnAnyFailure);
        if (saveCache == true) {
            core.debug(
                `Exporting environment variable ${Variables.SaveCacheOnAnyFailure}`
            );
            core.exportVariable(Variables.SaveCacheOnAnyFailure, saveCache);
            core.info(
                `Input Variable ${Variables.SaveCacheOnAnyFailure} is set to true, the cache will be saved despite of any failure in the build.`
            );
        }

        if (!cacheKey) {
            if (core.getBooleanInput(Inputs.FailOnCacheMiss) == true) {
                throw new Error(
                    `Cache with the given input key ${primaryKey} is not found, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
                );
            }
            core.info(
                `Cache not found for input keys: ${[
                    primaryKey,
                    ...restoreKeys
                ].join(", ")}`
            );

            return;
        }

        // Store the matched cache key
        utils.setCacheState(cacheKey);

        const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
        utils.setCacheHitOutput(isExactKeyMatch);

        if (
            !isExactKeyMatch &&
            core.getBooleanInput(Inputs.FailOnCacheMiss) == true
        ) {
            throw new Error(
                `Restored cache key doesn't match the given input key ${primaryKey}, hence exiting the workflow as the fail-on-cache-miss requirement is not met.`
            );
        }
        core.info(`Cache restored from key: ${cacheKey}`);
    } catch (error: unknown) {
        core.setFailed((error as Error).message);
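
To make the behavior wired up in restore.ts above concrete, here is a hedged sketch of a step using the two new inputs. The input names follow the constants in this diff, but the `actions/cache@v3` reference and the cache values are illustrative assumptions:

```yaml
# Hypothetical step exercising the new inputs handled above;
# the action reference and cache values are placeholders.
- name: Cache dependencies
  uses: actions/cache@v3
  with:
    path: ~/.npm
    key: npm-${{ hashFiles('**/package-lock.json') }}
    fail-on-cache-miss: true     # fail the job unless the primary key is an exact hit
    save-on-any-failure: true    # exports SAVE_CACHE_ON_ANY_FAILURE so the cache is saved even if the build fails
```
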
20
src/save-only.ts
Normal file
@ -0,0 +1,20 @@
import * as core from "@actions/core";

import { Inputs } from "./constants";
import save from "./save";
import * as utils from "./utils/actionUtils";

async function runSaveAction(): Promise<void> {
    if (!core.getInput(Inputs.Key)) {
        utils.logWarning(`Error retrieving key from inputs.`);
        return;
    }
    saveOnly = true;

    await save();
}

runSaveAction();

export default runSaveAction;
export let saveOnly: boolean;
@ -2,6 +2,7 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, State } from "./constants";
import { saveOnly } from "./save-only";
import * as utils from "./utils/actionUtils";

// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
@ -27,7 +28,11 @@ async function run(): Promise<void> {
        const state = utils.getCacheState();

        // Inputs are re-evaluted before the post action, so we want the original key used for restore
        const primaryKey = core.getState(State.CachePrimaryKey);
        const primaryKey =
            saveOnly === true
                ? core.getInput(Inputs.Key)
                : core.getState(State.CachePrimaryKey);

        if (!primaryKey) {
            utils.logWarning(`Error retrieving key from state.`);
            return;
@ -56,6 +61,4 @@ async function run(): Promise<void> {
    }
}

run();

export default run;
@ -13,18 +13,28 @@ interface CacheInput {
    path: string;
    key: string;
    restoreKeys?: string[];
    failOnCacheMiss?: boolean;
    saveOnAnyFailure?: boolean;
}

export function setInputs(input: CacheInput): void {
    setInput(Inputs.Path, input.path);
    setInput(Inputs.Key, input.key);
    setInput(Inputs.SaveOnAnyFailure, "false");
    setInput(Inputs.FailOnCacheMiss, "false");
    input.restoreKeys &&
        setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
    input.failOnCacheMiss &&
        setInput(Inputs.FailOnCacheMiss, String(input.failOnCacheMiss));
    input.saveOnAnyFailure &&
        setInput(Inputs.SaveOnAnyFailure, String(input.saveOnAnyFailure));
}

export function clearInputs(): void {
    delete process.env[getInputName(Inputs.Path)];
    delete process.env[getInputName(Inputs.Key)];
    delete process.env[getInputName(Inputs.RestoreKeys)];
    delete process.env[getInputName(Inputs.FailOnCacheMiss)];
    delete process.env[getInputName(Inputs.SaveOnAnyFailure)];
    delete process.env[getInputName(Inputs.UploadChunkSize)];
}
@ -1,9 +1,9 @@
#### Cache segment restore timeout
## Cache segment restore timeout
A cache gets downloaded in multiple segments of fixed sizes (`1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Sometimes a segment download gets stuck, which causes the workflow job to hang and eventually fail. Version `v3.0.8` of `actions/cache` introduces a segment download timeout, which allows a stuck segment download to be aborted so that the job can proceed with a cache miss.

The default value of this timeout is 60 minutes; it can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with the timeout value in minutes.
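
As a minimal sketch of what that looks like in practice, the following job-level snippet sets the variable; the job layout and cache values are placeholders, only the `SEGMENT_DOWNLOAD_TIMEOUT_MINS` name comes from the text above:

```yaml
jobs:
  build:
    runs-on: ubuntu-latest
    env:
      SEGMENT_DOWNLOAD_TIMEOUT_MINS: 30   # abort a stuck segment download after 30 minutes
    steps:
      - uses: actions/cache@v3
        with:
          path: ~/.npm
          key: npm-${{ hashFiles('**/package-lock.json') }}
```
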
#### Update a cache
## Update a cache
A cache today is immutable and cannot be updated. But some use cases require the cache to be saved even though there was a "hit" during restore. To do so, use a `key` which is unique for every run and use `restore-keys` to restore the nearest cache. For example:
```yaml
- name: update cache on every commit
@ -16,10 +16,10 @@ A cache today is immutable and cannot be updated. But some use cases require the
```
Please note that this will create a new cache on every run and hence will consume the cache [quota](#cache-limits).

#### Use cache across feature branches
## Use cache across feature branches
Reusing cache across feature branches is not allowed today in order to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However, if both feature branches are created from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches.

#### Improving cache restore performance on Windows/Using cross-os caching
## Improving cache restore performance on Windows/Using cross-os caching
Currently, cache restore is slow on Windows due to tar being inherently slow and the compression algorithm `gzip` in use. `zstd` is the default algorithm on Linux and macOS, but it was disabled on Windows due to issues with bsd tar (libarchive), the tar implementation in use on Windows.

To improve cache restore performance, we can re-enable `zstd` as the compression algorithm using the following workaround. Add the following step to your workflow before the cache step:
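
The concrete step is elided by the diff context below; as a rough sketch of the commonly documented form of this workaround (assumed here, not taken from this diff), a step like the following puts GNU tar ahead of bsd tar on the PATH of Windows runners:

```yaml
# Illustrative sketch only: prepend GNU tar (shipped with Git for Windows) to PATH.
- if: ${{ runner.os == 'Windows' }}
  name: Use GNU tar
  shell: cmd
  run: |
    echo "Adding GNU tar to PATH"
    echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%"
```
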
@ -35,4 +35,49 @@ To improve cache restore performance, we can re-enable `zstd` as the compression
The `cache` action will use GNU tar instead of bsd tar on Windows. This should work as is on all GitHub-hosted runners. For self-hosted runners, please ensure you have GNU tar and `zstd` installed.

The above workaround is also needed if you wish to use cross-os caching, since a difference in compression algorithms will result in different cache versions for the same cache key. The workaround ensures `zstd` is used for caching on all platforms, thus producing the same cache version for the same cache key.

## Force deletion of caches overriding default cache eviction policy
Caches have a [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch are using a lot of storage quota, it may result in more frequently used caches from the `default` branch getting thrashed. For example, if many pull requests are being opened on a repo and are creating caches, these cannot be used in the default branch scope but will still occupy a lot of space until they get cleaned up by the [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). But sometimes we want to clean them up on a faster cadence to ensure the default branch caches are not thrashed. To achieve this, the [gh-actions-cache cli](https://github.com/actions/gh-actions-cache/) can be used to delete caches for specific branches.

This workflow uses `gh-actions-cache` to delete all the caches created by a branch.
<details>
<summary>Example</summary>

```yaml
name: cleanup caches by a branch
on:
  pull_request:
    types:
      - closed
  workflow_dispatch:

jobs:
  cleanup:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3

      - name: Cleanup
        run: |
          gh extension install actions/gh-actions-cache

          REPO=${{ github.repository }}
          BRANCH=${{ github.ref }}

          echo "Fetching list of cache key"
          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )

          ## Setting this to not fail the workflow while deleting cache keys.
          set +e
          echo "Deleting caches..."
          for cacheKey in $cacheKeysForPR
          do
              gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
          done
          echo "Done"
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
</details>