Compare commits


5 Commits

31 changed files with 6233 additions and 129713 deletions


@ -1,14 +0,0 @@
{
"name": "Node.js & TypeScript",
"image": "mcr.microsoft.com/devcontainers/typescript-node:16-bullseye",
// Features to add to the dev container. More info: https://containers.dev/implementors/features.
// "features": {},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "npm install && npm run build"
// Configure tool-specific properties.
// "customizations": {},
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
// "remoteUser": "root"
}

21
.github/auto_assign.yml vendored Normal file

@ -0,0 +1,21 @@
# Set to true to add reviewers to pull requests
addReviewers: true
# Set to true to add assignees to pull requests
addAssignees: false
# A list of reviewers to be added to pull requests (GitHub user name)
reviewers:
- anuragc617
- pallavx
- pdotl
- phantsure
- kotewar
- aparna-ravindra
- tiwarishub
- vsvipul
- bishal-pdmsft
# A number of reviewers added to the pull request
# Set 0 to add all the reviewers (default: 0)
numberOfReviewers: 1


@ -1,20 +0,0 @@
name: Add Reviewer PR
on:
pull_request_target:
types: [opened]
jobs:
run-action:
runs-on: ubuntu-latest
steps:
- name: Get current oncall
id: oncall
run: |
echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT
- name: Request Review
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/pulls/${{ github.event.pull_request.number}}/requested_reviewers -d '{"reviewers":["${{steps.oncall.outputs.CURRENT}}"]}'
- name: Add Assignee
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'


@ -1,16 +0,0 @@
name: Assign issue
on:
issues:
types: [opened]
jobs:
run-action:
runs-on: ubuntu-latest
steps:
- name: Get current oncall
id: oncall
run: |
echo "CURRENT=$(curl --request GET 'https://api.pagerduty.com/oncalls?include[]=users&schedule_ids[]=P5VG2BX&earliest=true' --header 'Authorization: Token token=${{ secrets.PAGERDUTY_TOKEN }}' --header 'Accept: application/vnd.pagerduty+json;version=2' --header 'Content-Type: application/json' | jq -r '.oncalls[].user.name')" >> $GITHUB_OUTPUT
- name: add_assignees
run: |
curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'


@ -0,0 +1,15 @@
name: Issue assignment
on:
issues:
types: [opened]
jobs:
auto-assign:
runs-on: ubuntu-latest
steps:
- name: 'Auto-assign issue'
uses: pozil/auto-assign-issue@v1.4.0
with:
assignees: anuragc617,pallavx,pdotl,phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
numOfAssignee: 1

10
.github/workflows/auto-assign.yml vendored Normal file

@ -0,0 +1,10 @@
name: 'Auto Assign'
on:
pull_request_target:
types: [opened, ready_for_review]
jobs:
add-reviews:
runs-on: ubuntu-latest
steps:
- uses: kentaro-m/auto-assign-action@v1.2.1


@ -27,7 +27,6 @@ jobs:
uses: actions/setup-node@v3
with:
node-version: 16.x
-cache: npm
- name: Install dependencies
run: npm ci
- name: Rebuild the dist/ directory


@ -25,7 +25,17 @@ jobs:
uses: actions/setup-node@v3
with:
node-version: 16.x
-cache: npm
+- name: Determine npm cache directory
+id: npm-cache
+run: |
+echo "::set-output name=dir::$(npm config get cache)"
+- name: Restore npm cache
+uses: actions/cache@v3
+with:
+path: ${{ steps.npm-cache.outputs.dir }}
+key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
+restore-keys: |
+${{ runner.os }}-node-
- run: npm ci
- name: Prettier Format Check
run: npm run format-check


@ -37,7 +37,7 @@ If you are using this inside a container, a POSIX-compliant `tar` needs to be in
* `restore-keys` - An ordered list of prefix-matched keys to use for restoring stale cache if no cache hit occurred for key.
#### Environment Variables
-* `SEGMENT_DOWNLOAD_TIMEOUT_MINS` - Segment download timeout (in minutes, default `60`) to abort download of the segment if not completed in the defined number of minutes. [Read more](https://github.com/actions/cache/blob/main/workarounds.md#cache-segment-restore-timeout)
+* `SEGMENT_DOWNLOAD_TIMEOUT_MINS` - Segment download timeout (in minutes, default `60`) to abort download of the segment if not completed in the defined number of minutes. [Read more](#cache-segment-restore-timeout)
### Outputs
@ -118,7 +118,7 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions.
-For example, using the [`hashFiles`](https://docs.github.com/en/actions/learn-github-actions/expressions#hashfiles) function allows you to create a new cache when dependencies change.
+For example, using the [`hashFiles`](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles) function allows you to create a new cache when dependencies change.
```yaml
- uses: actions/cache@v3
@ -233,11 +233,10 @@ jobs:
## Known practices and workarounds
Following are some of the known practices/workarounds which community has used to fulfill specific requirements. You may choose to use them if suits your use case. Note these are not necessarily the only or the recommended solution.
-- [Cache segment restore timeout](./tips-and-workarounds.md#cache-segment-restore-timeout)
+- [Cache segment restore timeout](./workarounds.md#cache-segment-restore-timeout)
-- [Update a cache](./tips-and-workarounds.md#update-a-cache)
+- [Update a cache](./workarounds.md#update-a-cache)
-- [Use cache across feature branches](./tips-and-workarounds.md#use-cache-across-feature-branches)
+- [Use cache across feature branches](./workarounds.md#use-cache-across-feature-branches)
-- [Improving cache restore performance on Windows/Using cross-os caching](./tips-and-workarounds.md#improving-cache-restore-performance-on-windows-using-cross-os-caching)
+- [Improving cache restore performance on Windows/Using cross-os caching](./workarounds.md#improving-cache-restore-performance-on-windows-using-cross-os-caching)
-- [Force deletion of caches overriding default cache eviction policy](./tips-and-workarounds.md#force-deletion-of-caches-overriding-default-cache-eviction-policy)
#### Windows environment variables
Please note that Windows environment variables (like `%LocalAppData%`) will NOT be expanded by this action. Instead, prefer using `~` in your paths which will expand to HOME directory. For example, instead of `%LocalAppData%`, use `~\AppData\Local`. For a list of supported default environment variables, see [this](https://docs.github.com/en/actions/learn-github-actions/environment-variables) page.
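The `hashFiles` example in this README hunk is cut off at the hunk boundary; a representative complete step of the kind that passage describes (the cache path and key prefix here are illustrative, not taken from the diff) might look like:

```yaml
# Illustrative sketch: the key is rebuilt whenever package-lock.json changes,
# and restore-keys falls back to the most recent cache sharing the prefix.
- uses: actions/cache@v3
  with:
    path: ~/.npm
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      ${{ runner.os }}-node-
```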


@ -102,7 +102,7 @@ test("restore on GHES with AC available ", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
-const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
+const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
@ -116,7 +116,7 @@ test("restore on GHES with AC available ", async () => {
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "true");
+expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
@ -270,7 +270,7 @@ test("restore with cache found for key", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
-const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
+const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
@ -284,7 +284,7 @@ test("restore with cache found for key", async () => {
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "true");
+expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
@ -303,7 +303,7 @@ test("restore with cache found for restore key", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
-const setCacheHitOutputMock = jest.spyOn(core, "setOutput");
+const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
@ -317,7 +317,8 @@ test("restore with cache found for restore key", async () => {
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-expect(setCacheHitOutputMock).toHaveBeenCalledWith("cache-hit", "false");
+expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(infoMock).toHaveBeenCalledWith(
`Cache restored from key: ${restoreKey}`
);


@ -21,7 +21,7 @@ runs:
using: 'node16'
main: 'dist/restore/index.js'
post: 'dist/save/index.js'
-post-if: success()
+post-if: 'success()'
branding:
icon: 'archive'
color: 'gray-dark'

File diff suppressed because one or more lines are too long

5587
dist/restore/index.js vendored

File diff suppressed because it is too large

61382
dist/save-only/index.js vendored

File diff suppressed because one or more lines are too long

431
dist/save/index.js vendored

@ -1892,10 +1892,10 @@ function serial(list, iterator, callback)
module.exports = minimatch module.exports = minimatch
minimatch.Minimatch = Minimatch minimatch.Minimatch = Minimatch
var path = (function () { try { return __webpack_require__(622) } catch (e) {}}()) || { var path = { sep: '/' }
sep: '/' try {
} path = __webpack_require__(622)
minimatch.sep = path.sep } catch (er) {}
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
var expand = __webpack_require__(306) var expand = __webpack_require__(306)
@ -1947,64 +1947,43 @@ function filter (pattern, options) {
} }
function ext (a, b) { function ext (a, b) {
a = a || {}
b = b || {} b = b || {}
var t = {} var t = {}
Object.keys(a).forEach(function (k) {
t[k] = a[k]
})
Object.keys(b).forEach(function (k) { Object.keys(b).forEach(function (k) {
t[k] = b[k] t[k] = b[k]
}) })
Object.keys(a).forEach(function (k) {
t[k] = a[k]
})
return t return t
} }
minimatch.defaults = function (def) { minimatch.defaults = function (def) {
if (!def || typeof def !== 'object' || !Object.keys(def).length) { if (!def || !Object.keys(def).length) return minimatch
return minimatch
}
var orig = minimatch var orig = minimatch
var m = function minimatch (p, pattern, options) { var m = function minimatch (p, pattern, options) {
return orig(p, pattern, ext(def, options)) return orig.minimatch(p, pattern, ext(def, options))
} }
m.Minimatch = function Minimatch (pattern, options) { m.Minimatch = function Minimatch (pattern, options) {
return new orig.Minimatch(pattern, ext(def, options)) return new orig.Minimatch(pattern, ext(def, options))
} }
m.Minimatch.defaults = function defaults (options) {
return orig.defaults(ext(def, options)).Minimatch
}
m.filter = function filter (pattern, options) {
return orig.filter(pattern, ext(def, options))
}
m.defaults = function defaults (options) {
return orig.defaults(ext(def, options))
}
m.makeRe = function makeRe (pattern, options) {
return orig.makeRe(pattern, ext(def, options))
}
m.braceExpand = function braceExpand (pattern, options) {
return orig.braceExpand(pattern, ext(def, options))
}
m.match = function (list, pattern, options) {
return orig.match(list, pattern, ext(def, options))
}
return m return m
} }
Minimatch.defaults = function (def) { Minimatch.defaults = function (def) {
if (!def || !Object.keys(def).length) return Minimatch
return minimatch.defaults(def).Minimatch return minimatch.defaults(def).Minimatch
} }
function minimatch (p, pattern, options) { function minimatch (p, pattern, options) {
assertValidPattern(pattern) if (typeof pattern !== 'string') {
throw new TypeError('glob pattern string required')
}
if (!options) options = {} if (!options) options = {}
@ -2013,6 +1992,9 @@ function minimatch (p, pattern, options) {
return false return false
} }
// "" only matches ""
if (pattern.trim() === '') return p === ''
return new Minimatch(pattern, options).match(p) return new Minimatch(pattern, options).match(p)
} }
@ -2021,14 +2003,15 @@ function Minimatch (pattern, options) {
return new Minimatch(pattern, options) return new Minimatch(pattern, options)
} }
assertValidPattern(pattern) if (typeof pattern !== 'string') {
throw new TypeError('glob pattern string required')
}
if (!options) options = {} if (!options) options = {}
pattern = pattern.trim() pattern = pattern.trim()
// windows support: need to use /, not \ // windows support: need to use /, not \
if (!options.allowWindowsEscape && path.sep !== '/') { if (path.sep !== '/') {
pattern = pattern.split(path.sep).join('/') pattern = pattern.split(path.sep).join('/')
} }
@ -2039,7 +2022,6 @@ function Minimatch (pattern, options) {
this.negate = false this.negate = false
this.comment = false this.comment = false
this.empty = false this.empty = false
this.partial = !!options.partial
// make the set of regexps etc. // make the set of regexps etc.
this.make() this.make()
@ -2049,6 +2031,9 @@ Minimatch.prototype.debug = function () {}
Minimatch.prototype.make = make Minimatch.prototype.make = make
function make () { function make () {
// don't do it more than once.
if (this._made) return
var pattern = this.pattern var pattern = this.pattern
var options = this.options var options = this.options
@ -2068,7 +2053,7 @@ function make () {
// step 2: expand braces // step 2: expand braces
var set = this.globSet = this.braceExpand() var set = this.globSet = this.braceExpand()
if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } if (options.debug) this.debug = console.error
this.debug(this.pattern, set) this.debug(this.pattern, set)
@ -2148,11 +2133,12 @@ function braceExpand (pattern, options) {
pattern = typeof pattern === 'undefined' pattern = typeof pattern === 'undefined'
? this.pattern : pattern ? this.pattern : pattern
assertValidPattern(pattern) if (typeof pattern === 'undefined') {
throw new TypeError('undefined pattern')
}
// Thanks to Yeting Li <https://github.com/yetingli> for if (options.nobrace ||
// improving this regexp to avoid a ReDOS vulnerability. !pattern.match(/\{.*\}/)) {
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
// shortcut. no need to expand. // shortcut. no need to expand.
return [pattern] return [pattern]
} }
@ -2160,17 +2146,6 @@ function braceExpand (pattern, options) {
return expand(pattern) return expand(pattern)
} }
var MAX_PATTERN_LENGTH = 1024 * 64
var assertValidPattern = function (pattern) {
if (typeof pattern !== 'string') {
throw new TypeError('invalid pattern')
}
if (pattern.length > MAX_PATTERN_LENGTH) {
throw new TypeError('pattern is too long')
}
}
// parse a component of the expanded set. // parse a component of the expanded set.
// At this point, no pattern may contain "/" in it // At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full // so we're going to return a 2d array, where each entry is the full
@ -2185,17 +2160,14 @@ var assertValidPattern = function (pattern) {
Minimatch.prototype.parse = parse Minimatch.prototype.parse = parse
var SUBPARSE = {} var SUBPARSE = {}
function parse (pattern, isSub) { function parse (pattern, isSub) {
assertValidPattern(pattern) if (pattern.length > 1024 * 64) {
throw new TypeError('pattern is too long')
}
var options = this.options var options = this.options
// shortcuts // shortcuts
if (pattern === '**') { if (!options.noglobstar && pattern === '**') return GLOBSTAR
if (!options.noglobstar)
return GLOBSTAR
else
pattern = '*'
}
if (pattern === '') return '' if (pattern === '') return ''
var re = '' var re = ''
@ -2251,12 +2223,10 @@ function parse (pattern, isSub) {
} }
switch (c) { switch (c) {
/* istanbul ignore next */ case '/':
case '/': {
// completely not allowed, even escaped. // completely not allowed, even escaped.
// Should already be path-split by now. // Should already be path-split by now.
return false return false
}
case '\\': case '\\':
clearStateChar() clearStateChar()
@ -2375,23 +2345,25 @@ function parse (pattern, isSub) {
// handle the case where we left a class open. // handle the case where we left a class open.
// "[z-a]" is valid, equivalent to "\[z-a\]" // "[z-a]" is valid, equivalent to "\[z-a\]"
// split where the last [ was, make sure we don't have if (inClass) {
// an invalid re. if so, re-walk the contents of the // split where the last [ was, make sure we don't have
// would-be class to re-translate any characters that // an invalid re. if so, re-walk the contents of the
// were passed through as-is // would-be class to re-translate any characters that
// TODO: It would probably be faster to determine this // were passed through as-is
// without a try/catch and a new RegExp, but it's tricky // TODO: It would probably be faster to determine this
// to do safely. For now, this is safe and works. // without a try/catch and a new RegExp, but it's tricky
var cs = pattern.substring(classStart + 1, i) // to do safely. For now, this is safe and works.
try { var cs = pattern.substring(classStart + 1, i)
RegExp('[' + cs + ']') try {
} catch (er) { RegExp('[' + cs + ']')
// not a valid class! } catch (er) {
var sp = this.parse(cs, SUBPARSE) // not a valid class!
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' var sp = this.parse(cs, SUBPARSE)
hasMagic = hasMagic || sp[1] re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
inClass = false hasMagic = hasMagic || sp[1]
continue inClass = false
continue
}
} }
// finish up the class. // finish up the class.
@ -2475,7 +2447,9 @@ function parse (pattern, isSub) {
// something that could conceivably capture a dot // something that could conceivably capture a dot
var addPatternStart = false var addPatternStart = false
switch (re.charAt(0)) { switch (re.charAt(0)) {
case '[': case '.': case '(': addPatternStart = true case '.':
case '[':
case '(': addPatternStart = true
} }
// Hack to work around lack of negative lookbehind in JS // Hack to work around lack of negative lookbehind in JS
@ -2537,7 +2511,7 @@ function parse (pattern, isSub) {
var flags = options.nocase ? 'i' : '' var flags = options.nocase ? 'i' : ''
try { try {
var regExp = new RegExp('^' + re + '$', flags) var regExp = new RegExp('^' + re + '$', flags)
} catch (er) /* istanbul ignore next - should be impossible */ { } catch (er) {
// If it was an invalid regular expression, then it can't match // If it was an invalid regular expression, then it can't match
// anything. This trick looks for a character after the end of // anything. This trick looks for a character after the end of
// the string, which is of course impossible, except in multi-line // the string, which is of course impossible, except in multi-line
@ -2595,7 +2569,7 @@ function makeRe () {
try { try {
this.regexp = new RegExp(re, flags) this.regexp = new RegExp(re, flags)
} catch (ex) /* istanbul ignore next - should be impossible */ { } catch (ex) {
this.regexp = false this.regexp = false
} }
return this.regexp return this.regexp
@ -2613,8 +2587,8 @@ minimatch.match = function (list, pattern, options) {
return list return list
} }
Minimatch.prototype.match = function match (f, partial) { Minimatch.prototype.match = match
if (typeof partial === 'undefined') partial = this.partial function match (f, partial) {
this.debug('match', f, this.pattern) this.debug('match', f, this.pattern)
// short-circuit in the case of busted things. // short-circuit in the case of busted things.
// comments, etc. // comments, etc.
@ -2696,7 +2670,6 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// should be impossible. // should be impossible.
// some invalid regexp stuff in the set. // some invalid regexp stuff in the set.
/* istanbul ignore if */
if (p === false) return false if (p === false) return false
if (p === GLOBSTAR) { if (p === GLOBSTAR) {
@ -2770,7 +2743,6 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// no match was found. // no match was found.
// However, in partial mode, we can't say this is necessarily over. // However, in partial mode, we can't say this is necessarily over.
// If there's more *pattern* left, then // If there's more *pattern* left, then
/* istanbul ignore if */
if (partial) { if (partial) {
// ran out of file // ran out of file
this.debug('\n>>> no match, partial?', file, fr, pattern, pr) this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
@ -2784,7 +2756,11 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// patterns with magic have been turned into regexps. // patterns with magic have been turned into regexps.
var hit var hit
if (typeof p === 'string') { if (typeof p === 'string') {
hit = f === p if (options.nocase) {
hit = f.toLowerCase() === p.toLowerCase()
} else {
hit = f === p
}
this.debug('string match', p, f, hit) this.debug('string match', p, f, hit)
} else { } else {
hit = f.match(p) hit = f.match(p)
@ -2815,16 +2791,16 @@ Minimatch.prototype.matchOne = function (file, pattern, partial) {
// this is ok if we're doing the match as part of // this is ok if we're doing the match as part of
// a glob fs traversal. // a glob fs traversal.
return partial return partial
} else /* istanbul ignore else */ if (pi === pl) { } else if (pi === pl) {
// ran out of pattern, still have file left. // ran out of pattern, still have file left.
// this is only acceptable if we're on the very last // this is only acceptable if we're on the very last
// empty segment of a file with a trailing slash. // empty segment of a file with a trailing slash.
// a/* should match a/b/ // a/* should match a/b/
return (fi === fl - 1) && (file[fi] === '') var emptyFileEnd = (fi === fl - 1) && (file[fi] === '')
return emptyFileEnd
} }
// should be unreachable. // should be unreachable.
/* istanbul ignore next */
throw new Error('wtf?') throw new Error('wtf?')
} }
@ -4975,8 +4951,6 @@ var Inputs;
var Outputs; var Outputs;
(function (Outputs) { (function (Outputs) {
Outputs["CacheHit"] = "cache-hit"; Outputs["CacheHit"] = "cache-hit";
Outputs["Key"] = "key";
Outputs["MatchedKey"] = "matched-key";
})(Outputs = exports.Outputs || (exports.Outputs = {})); })(Outputs = exports.Outputs || (exports.Outputs = {}));
var State; var State;
(function (State) { (function (State) {
@ -9346,76 +9320,7 @@ function expand(str, isTop) {
/***/ }), /***/ }),
/* 307 */, /* 307 */,
/* 308 */, /* 308 */,
/* 309 */ /* 309 */,
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.NullStateProvider = exports.StateProvider = void 0;
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196);
class StateProviderBase {
constructor() {
// eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function
this.setState = (key, value) => { };
// eslint-disable-next-line @typescript-eslint/no-unused-vars
this.getState = (key) => "";
}
getCacheState() {
const cacheKey = this.getState(constants_1.State.CacheMatchedKey);
if (cacheKey) {
core.debug(`Cache state/key: ${cacheKey}`);
return cacheKey;
}
return undefined;
}
}
class StateProvider extends StateProviderBase {
constructor() {
super(...arguments);
//setOutput = core.setOutput;
this.setState = core.saveState;
this.getState = core.getState;
}
}
exports.StateProvider = StateProvider;
class NullStateProvider extends StateProviderBase {
constructor() {
super(...arguments);
//setOutput = core.setOutput;
this.setState = core.setOutput;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
this.getState = (key) => "";
}
}
exports.NullStateProvider = NullStateProvider;
/***/ }),
/* 310 */, /* 310 */,
/* 311 */, /* 311 */,
/* 312 */ /* 312 */
@ -38493,7 +38398,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result; return result;
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.setCacheHitOutput = exports.isExactKeyMatch = exports.isGhes = void 0; exports.isCacheFeatureAvailable = exports.getInputAsInt = exports.getInputAsArray = exports.isValidEvent = exports.logWarning = exports.getCacheState = exports.setOutputAndState = exports.setCacheHitOutput = exports.setCacheState = exports.isExactKeyMatch = exports.isGhes = void 0;
const cache = __importStar(__webpack_require__(692)); const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470)); const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196); const constants_1 = __webpack_require__(196);
@ -38509,10 +38414,29 @@ function isExactKeyMatch(key, cacheKey) {
}) === 0); }) === 0);
} }
exports.isExactKeyMatch = isExactKeyMatch; exports.isExactKeyMatch = isExactKeyMatch;
function setCacheState(state) {
core.saveState(constants_1.State.CacheMatchedKey, state);
}
exports.setCacheState = setCacheState;
function setCacheHitOutput(isCacheHit) { function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
} }
exports.setCacheHitOutput = setCacheHitOutput; exports.setCacheHitOutput = setCacheHitOutput;
function setOutputAndState(key, cacheKey) {
setCacheHitOutput(isExactKeyMatch(key, cacheKey));
// Store the matched cache key if it exists
cacheKey && setCacheState(cacheKey);
}
exports.setOutputAndState = setOutputAndState;
function getCacheState() {
const cacheKey = core.getState(constants_1.State.CacheMatchedKey);
if (cacheKey) {
core.debug(`Cache state/key: ${cacheKey}`);
return cacheKey;
}
return undefined;
}
exports.getCacheState = getCacheState;
function logWarning(message) { function logWarning(message) {
const warningPrefix = "[warning]"; const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`); core.info(`${warningPrefix}${message}`);
@ -38541,16 +38465,17 @@ function getInputAsInt(name, options) {
} }
exports.getInputAsInt = getInputAsInt; exports.getInputAsInt = getInputAsInt;
function isCacheFeatureAvailable() { function isCacheFeatureAvailable() {
if (cache.isFeatureAvailable()) { if (!cache.isFeatureAvailable()) {
return true; if (isGhes()) {
} logWarning(`Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.
if (isGhes()) {
logWarning(`Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.
Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github Connect, please unretire the actions/cache namespace before upgrade (see https://docs.github.com/en/enterprise-server@3.5/admin/github-actions/managing-access-to-actions-from-githubcom/enabling-automatic-access-to-githubcom-actions-using-github-connect#automatic-retirement-of-namespaces-for-actions-accessed-on-githubcom)`); Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github Connect, please unretire the actions/cache namespace before upgrade (see https://docs.github.com/en/enterprise-server@3.5/admin/github-actions/managing-access-to-actions-from-githubcom/enabling-automatic-access-to-githubcom-actions-using-github-connect#automatic-retirement-of-namespaces-for-actions-accessed-on-githubcom)`);
}
else {
logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions.");
}
return false; return false;
} }
logWarning("An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."); return true;
return false;
} }
exports.isCacheFeatureAvailable = isCacheFeatureAvailable; exports.isCacheFeatureAvailable = isCacheFeatureAvailable;
@ -40944,96 +40869,7 @@ Object.defineProperty(exports, "toPlatformPath", { enumerable: true, get: functi
//# sourceMappingURL=core.js.map //# sourceMappingURL=core.js.map
/***/ }), /***/ }),
/* 471 */ /* 471 */,
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196);
const utils = __importStar(__webpack_require__(443));
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on("uncaughtException", e => utils.logWarning(e.message));
function saveImpl(stateProvider) {
return __awaiter(this, void 0, void 0, function* () {
try {
if (!utils.isCacheFeatureAvailable()) {
return;
}
if (!utils.isValidEvent()) {
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported because it's not tied to a branch or tag ref.`);
return;
}
// If restore has stored a primary key in state, reuse that
// Else re-evaluate from inputs
const primaryKey = stateProvider.getState(constants_1.State.CachePrimaryKey) ||
core.getInput(constants_1.Inputs.Key);
if (!primaryKey) {
utils.logWarning(`Error retrieving key from state.`);
return;
}
// If matched restore key is same as primary key, then do not save cache
// NO-OP in case of SaveOnly action
const state = stateProvider.getCacheState();
if (utils.isExactKeyMatch(primaryKey, state)) {
core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
return;
}
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
required: true
});
const cacheId = yield cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
});
if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`);
}
}
catch (error) {
utils.logWarning(error.message);
}
});
}
exports.default = saveImpl;
/***/ }),
/* 472 */, /* 472 */,
/* 473 */, /* 473 */,
/* 474 */, /* 474 */,
@ -47409,6 +47245,29 @@ exports.default = _default;
"use strict"; "use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) { return new (P || (P = Promise))(function (resolve, reject) {
@ -47418,15 +47277,49 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next()); step((generator = generator.apply(thisArg, _arguments || [])).next());
}); });
}; };
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const saveImpl_1 = __importDefault(__webpack_require__(471)); const cache = __importStar(__webpack_require__(692));
const stateProvider_1 = __webpack_require__(309); const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(196);
const utils = __importStar(__webpack_require__(443));
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on("uncaughtException", e => utils.logWarning(e.message));
function run() { function run() {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
yield (0, saveImpl_1.default)(new stateProvider_1.StateProvider()); try {
if (!utils.isCacheFeatureAvailable()) {
return;
}
if (!utils.isValidEvent()) {
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported because it's not tied to a branch or tag ref.`);
return;
}
const state = utils.getCacheState();
// Inputs are re-evaluted before the post action, so we want the original key used for restore
const primaryKey = core.getState(constants_1.State.CachePrimaryKey);
if (!primaryKey) {
utils.logWarning(`Error retrieving key from state.`);
return;
}
if (utils.isExactKeyMatch(primaryKey, state)) {
core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
return;
}
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
required: true
});
const cacheId = yield cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
});
if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`);
}
}
catch (error) {
utils.logWarning(error.message);
}
}); });
} }
run(); run();

6492
package-lock.json generated

File diff suppressed because it is too large


@ -5,7 +5,7 @@
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
"scripts": {
-"build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && ncc build -o dist/restore-only src/restoreOnly.ts && ncc build -o dist/save-only src/saveOnly.ts",
+"build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
"test": "tsc --noEmit && jest --coverage",
"lint": "eslint **/*.ts --cache",
"format": "prettier --write **/*.ts",
@ -29,23 +29,23 @@
"@actions/io": "^1.1.2"
},
"devDependencies": {
-"@types/jest": "^27.5.2",
+"@types/jest": "^27.5.0",
"@types/nock": "^11.1.0",
-"@types/node": "^16.18.3",
+"@types/node": "^16.11.33",
-"@typescript-eslint/eslint-plugin": "^5.45.0",
+"@typescript-eslint/eslint-plugin": "^5.22.0",
-"@typescript-eslint/parser": "^5.45.0",
+"@typescript-eslint/parser": "^5.22.0",
"@zeit/ncc": "^0.20.5",
-"eslint": "^8.28.0",
+"eslint": "^8.14.0",
"eslint-config-prettier": "^8.5.0",
"eslint-plugin-import": "^2.26.0",
-"eslint-plugin-jest": "^26.9.0",
+"eslint-plugin-jest": "^26.1.5",
-"eslint-plugin-prettier": "^4.2.1",
+"eslint-plugin-prettier": "^4.0.0",
"eslint-plugin-simple-import-sort": "^7.0.0",
-"jest": "^28.1.3",
+"jest": "^28.0.3",
"jest-circus": "^27.5.1",
-"nock": "^13.2.9",
+"nock": "^13.2.4",
-"prettier": "^2.8.0",
+"prettier": "^2.6.2",
-"ts-jest": "^28.0.8",
+"ts-jest": "^28.0.2",
-"typescript": "^4.9.3"
+"typescript": "^4.6.4"
}
}


@ -1,27 +0,0 @@
name: 'Restore Only Cache'
description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
path:
description: 'The same list of files, directories, and wildcard patterns to restore cache that were used while saving it'
required: true
key:
description: 'An explicit key for restoring the cache'
required: true
restore-keys:
description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
required: false
outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'
key:
description: 'Key passed in the input to use in subsequent steps of the workflow'
matched-key:
description: 'Cache key restored'
runs:
using: 'node16'
main: '../dist/restore-only/index.js'
branding:
icon: 'archive'
color: 'gray-dark'


@ -1,27 +0,0 @@
name: 'Restore Cache'
description: 'Restore Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
path:
description: 'The same list of files, directories, and wildcard patterns to restore cache that were used while saving it'
required: true
key:
description: 'An explicit key for restoring the cache'
required: true
restore-keys:
description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
required: false
outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'
key:
description: 'Key passed in the input to use in subsequent steps of the workflow'
matched-key:
description: 'Cache key restored'
runs:
using: 'node16'
main: '../dist/restore/index.js'
branding:
icon: 'archive'
color: 'gray-dark'


@ -1,22 +0,0 @@
name: 'Save Only Cache'
description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
path:
description: 'A list of files, directories, and wildcard patterns to cache'
required: true
key:
description: 'An explicit key for saving the cache'
required: true
upload-chunk-size:
description: 'The chunk size used to split up large files during upload, in bytes'
required: false
matched-key:
description: 'Cache key restored from the restore action'
required: false
runs:
using: 'node16'
main: '../dist/save-only/index.js'
branding:
icon: 'archive'
color: 'gray-dark'


@ -1,22 +0,0 @@
name: 'Save Cache'
description: 'Save Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
path:
description: 'A list of files, directories, and wildcard patterns to cache'
required: true
key:
description: 'An explicit key for saving the cache'
required: true
upload-chunk-size:
description: 'The chunk size used to split up large files during upload, in bytes'
required: false
matched-key:
description: 'Cache key restored from the restore action'
required: false
runs:
using: 'node16'
main: '../dist/save/index.js'
branding:
icon: 'archive'
color: 'gray-dark'


@ -6,9 +6,7 @@ export enum Inputs {
}
export enum Outputs {
-CacheHit = "cache-hit",
-Key = "key",
-MatchedKey = "matched-key"
+CacheHit = "cache-hit"
}
export enum State {


@ -1,8 +1,60 @@
-import { StateProvider } from "./stateProvider";
-import restoreImpl from "./restoreImpl";
+import * as cache from "@actions/cache";
+import * as core from "@actions/core";
+import { Events, Inputs, State } from "./constants";
+import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
-await restoreImpl(new StateProvider());
+try {
if (!utils.isCacheFeatureAvailable()) {
utils.setCacheHitOutput(false);
return;
}
// Validate inputs, this can cause task failure
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.`
);
return;
}
const primaryKey = core.getInput(Inputs.Key, { required: true });
core.saveState(State.CachePrimaryKey, primaryKey);
const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
const cachePaths = utils.getInputAsArray(Inputs.Path, {
required: true
});
const cacheKey = await cache.restoreCache(
cachePaths,
primaryKey,
restoreKeys
);
if (!cacheKey) {
core.info(
`Cache not found for input keys: ${[
primaryKey,
...restoreKeys
].join(", ")}`
);
return;
}
// Store the matched cache key
utils.setCacheState(cacheKey);
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheKey}`);
} catch (error: unknown) {
core.setFailed((error as Error).message);
}
}
run();


@ -1,70 +0,0 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { Events, Inputs, Outputs, State } from "./constants";
import { IStateProvider } from "./stateProvider";
import * as utils from "./utils/actionUtils";
async function restoreImpl(
stateProvider: IStateProvider
): Promise<string | undefined> {
try {
if (!utils.isCacheFeatureAvailable()) {
utils.setCacheHitOutput(false);
return;
}
// Validate inputs, this can cause task failure
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.`
);
return;
}
const primaryKey = core.getInput(Inputs.Key, { required: true });
stateProvider.setState(State.CachePrimaryKey, primaryKey);
const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
const cachePaths = utils.getInputAsArray(Inputs.Path, {
required: true
});
const cacheKey = await cache.restoreCache(
cachePaths,
primaryKey,
restoreKeys
);
if (!cacheKey) {
core.info(
`Cache not found for input keys: ${[
primaryKey,
...restoreKeys
].join(", ")}`
);
return;
}
// Store the matched cache key in states
stateProvider.setState(State.CacheMatchedKey, cacheKey);
core.debug("setState: " + State.CacheMatchedKey + " " + cacheKey);
const isExactKeyMatch = utils.isExactKeyMatch(
core.getInput(Inputs.Key, { required: true }),
cacheKey
);
core.setOutput(Outputs.CacheHit, isExactKeyMatch.toString());
core.info(`Cache restored from key: ${cacheKey}`);
return cacheKey;
} catch (error: unknown) {
core.setFailed((error as Error).message);
}
}
export default restoreImpl;


@ -1,10 +0,0 @@
import restoreImpl from "./restoreImpl";
import { NullStateProvider } from "./stateProvider";
async function run(): Promise<void> {
await restoreImpl(new NullStateProvider());
}
run();
export default run;


@ -1,8 +1,59 @@
-import saveImpl from "./saveImpl";
-import { StateProvider } from "./stateProvider";
+import * as cache from "@actions/cache";
+import * as core from "@actions/core";
import { Events, Inputs, State } from "./constants";
import * as utils from "./utils/actionUtils";
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on("uncaughtException", e => utils.logWarning(e.message));
async function run(): Promise<void> {
-await saveImpl(new StateProvider());
+try {
if (!utils.isCacheFeatureAvailable()) {
return;
}
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.`
);
return;
}
const state = utils.getCacheState();
// Inputs are re-evaluted before the post action, so we want the original key used for restore
const primaryKey = core.getState(State.CachePrimaryKey);
if (!primaryKey) {
utils.logWarning(`Error retrieving key from state.`);
return;
}
if (utils.isExactKeyMatch(primaryKey, state)) {
core.info(
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
);
return;
}
const cachePaths = utils.getInputAsArray(Inputs.Path, {
required: true
});
const cacheId = await cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
});
if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`);
}
} catch (error: unknown) {
utils.logWarning((error as Error).message);
}
}
run();


@ -1,65 +0,0 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { Events, Inputs, State } from "./constants";
import { IStateProvider } from "./stateProvider";
import * as utils from "./utils/actionUtils";
// Catch and log any unhandled exceptions. These exceptions can leak out of the uploadChunk method in
// @actions/toolkit when a failed upload closes the file descriptor causing any in-process reads to
// throw an uncaught exception. Instead of failing this action, just warn.
process.on("uncaughtException", e => utils.logWarning(e.message));
async function saveImpl(stateProvider: IStateProvider): Promise<void> {
try {
if (!utils.isCacheFeatureAvailable()) {
return;
}
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.`
);
return;
}
// If restore has stored a primary key in state, reuse that
// Else re-evaluate from inputs
const primaryKey =
stateProvider.getState(State.CachePrimaryKey) ||
core.getInput(Inputs.Key);
if (!primaryKey) {
utils.logWarning(`Error retrieving key from state.`);
return;
}
// If matched restore key is same as primary key, then do not save cache
// NO-OP in case of SaveOnly action
const state = stateProvider.getCacheState();
if (utils.isExactKeyMatch(primaryKey, state)) {
core.info(
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
);
return;
}
const cachePaths = utils.getInputAsArray(Inputs.Path, {
required: true
});
const cacheId = await cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
});
if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`);
}
} catch (error: unknown) {
utils.logWarning((error as Error).message);
}
}
export default saveImpl;


@ -1,10 +0,0 @@
import saveImpl from "./saveImpl";
import { NullStateProvider } from "./stateProvider";
async function run(): Promise<void> {
await saveImpl(new NullStateProvider());
}
run();
export default run;


@ -1,42 +0,0 @@
import * as core from "@actions/core";
import { State } from "./constants";
export interface IStateProvider {
//setOutput(key: string, value: string): void;
setState(key: string, value: string): void;
getState(key: string): string;
getCacheState(): string | undefined;
}
class StateProviderBase implements IStateProvider {
getCacheState(): string | undefined {
const cacheKey = this.getState(State.CacheMatchedKey);
if (cacheKey) {
core.debug(`Cache state/key: ${cacheKey}`);
return cacheKey;
}
return undefined;
}
// eslint-disable-next-line @typescript-eslint/no-unused-vars, @typescript-eslint/no-empty-function
setState = (key: string, value: string) => {};
// eslint-disable-next-line @typescript-eslint/no-unused-vars
getState = (key: string) => "";
}
export class StateProvider extends StateProviderBase {
//setOutput = core.setOutput;
setState = core.saveState;
getState = core.getState;
}
export class NullStateProvider extends StateProviderBase {
//setOutput = core.setOutput;
setState = core.setOutput;
// eslint-disable-next-line @typescript-eslint/no-unused-vars
getState = (key: string) => "";
}


@ -1,7 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
-import { Outputs, RefKey } from "../constants";
+import { Outputs, RefKey, State } from "../constants";
export function isGhes(): boolean {
const ghUrl = new URL(
@ -19,10 +19,30 @@ export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
);
}
export function setCacheState(state: string): void {
core.saveState(State.CacheMatchedKey, state);
}
export function setCacheHitOutput(isCacheHit: boolean): void {
core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}
export function setOutputAndState(key: string, cacheKey?: string): void {
setCacheHitOutput(isExactKeyMatch(key, cacheKey));
// Store the matched cache key if it exists
cacheKey && setCacheState(cacheKey);
}
export function getCacheState(): string | undefined {
const cacheKey = core.getState(State.CacheMatchedKey);
if (cacheKey) {
core.debug(`Cache state/key: ${cacheKey}`);
return cacheKey;
}
return undefined;
}
export function logWarning(message: string): void {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
@ -57,20 +77,19 @@ export function getInputAsInt(
}
export function isCacheFeatureAvailable(): boolean {
-if (cache.isFeatureAvailable()) {
-return true;
-}
-if (isGhes()) {
-logWarning(
-`Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.
+if (!cache.isFeatureAvailable()) {
+if (isGhes()) {
+logWarning(
+`Cache action is only supported on GHES version >= 3.5. If you are on version >=3.5 Please check with GHES admin if Actions cache service is enabled or not.
Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github Connect, please unretire the actions/cache namespace before upgrade (see https://docs.github.com/en/enterprise-server@3.5/admin/github-actions/managing-access-to-actions-from-githubcom/enabling-automatic-access-to-githubcom-actions-using-github-connect#automatic-retirement-of-namespaces-for-actions-accessed-on-githubcom)` Otherwise please upgrade to GHES version >= 3.5 and If you are also using Github Connect, please unretire the actions/cache namespace before upgrade (see https://docs.github.com/en/enterprise-server@3.5/admin/github-actions/managing-access-to-actions-from-githubcom/enabling-automatic-access-to-githubcom-actions-using-github-connect#automatic-retirement-of-namespaces-for-actions-accessed-on-githubcom)`
); );
} else {
logWarning(
"An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."
);
}
return false; return false;
} }
logWarning( return true;
"An internal error has occurred in cache backend. Please check https://www.githubstatus.com/ for any ongoing issue in actions."
);
return false;
} }

View File

@ -1,9 +1,9 @@
-## Cache segment restore timeout
+#### Cache segment restore timeout
A cache gets downloaded in multiple segments of fixed sizes (`1GB` for a `32-bit` runner and `2GB` for a `64-bit` runner). Sometimes, a segment download gets stuck which causes the workflow job to be stuck forever and fail. Version `v3.0.8` of `actions/cache` introduces a segment download timeout. The segment download timeout will allow the segment download to get aborted and hence allow the job to proceed with a cache miss.
Default value of this timeout is 60 minutes and can be customized by specifying an [environment variable](https://docs.github.com/en/actions/learn-github-actions/environment-variables) named `SEGMENT_DOWNLOAD_TIMEOUT_MINS` with timeout value in minutes.
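As a minimal sketch of the paragraph above (the step name, path and key are illustrative; `SEGMENT_DOWNLOAD_TIMEOUT_MINS` is the documented variable), the timeout can be set directly on the cache step:

```yaml
- name: Restore and save npm cache
  uses: actions/cache@v3
  env:
    # Abort a stuck segment download after 10 minutes instead of the 60-minute default
    SEGMENT_DOWNLOAD_TIMEOUT_MINS: 10
  with:
    path: ~/.npm
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
```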
-## Update a cache
+#### Update a cache
A cache today is immutable and cannot be updated. But some use cases require the cache to be saved even though there was a "hit" during restore. To do so, use a `key` which is unique for every run and use `restore-keys` to restore the nearest cache. For example:
```yaml
  - name: update cache on every commit
@ -16,10 +16,10 @@ A cache today is immutable and cannot be updated. But some use cases require the
```
Please note that this will create a new cache on every run and hence will consume the cache [quota](#cache-limits).
-## Use cache across feature branches
+#### Use cache across feature branches
Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches.
-## Improving cache restore performance on Windows/Using cross-os caching
+#### Improving cache restore performance on Windows/Using cross-os caching
Currently, cache restore is slow on Windows due to tar being inherently slow and the compression algorithm `gzip` in use. `zstd` is the default algorithm in use on linux and macos. It was disabled on Windows due to issues with bsd tar(libarchive), the tar implementation in use on Windows.
To improve cache restore performance, we can re-enable `zstd` as the compression algorithm using the following workaround. Add the following step to your workflow before the cache step:
@ -35,49 +35,4 @@ To improve cache restore performance, we can re-enable `zstd` as the compression
The `cache` action will use GNU tar instead of bsd tar on Windows. This should work on all Github Hosted runners as it is. For self-hosted runners, please ensure you have GNU tar and `zstd` installed.
The above workaround is also needed if you wish to use cross-os caching since difference of compression algorithms will result in different cache versions for the same cache key. So the above workaround will ensure `zstd` is used for caching on all platforms thus resulting in the same cache version for the same cache key.
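The workflow step referenced above is cut off by the hunk split. As a rough sketch of such a step (assuming the GNU tar bundled with Git for Windows at its default install path, as on GitHub-hosted Windows runners), it adds the GNU tar directory to the `PATH` for subsequent steps:

```yaml
- if: ${{ runner.os == 'Windows' }}
  name: Use GNU tar instead of bsd tar
  shell: cmd
  run: echo C:\Program Files\Git\usr\bin>>"%GITHUB_PATH%"
```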
## Force deletion of caches overriding default cache eviction policy
Caches have a [branch scope restriction](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache) in place. This means that if caches for a specific branch are using a lot of storage quota, more frequently used caches from the `default` branch may get thrashed. For example, if a repo has many pull requests that create caches, those caches cannot be used in the default branch scope but will still occupy a lot of space until they are cleaned up by the [eviction policy](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#usage-limits-and-eviction-policy). But sometimes we want to clean them up on a faster cadence to ensure the default branch caches are not thrashed. To achieve this, the [gh-actions-cache cli](https://github.com/actions/gh-actions-cache/) can be used to delete caches for specific branches.
This workflow uses `gh-actions-cache` to delete all the caches created by a branch.
<details>
<summary>Example</summary>
```yaml
name: cleanup caches by a branch
on:
  pull_request:
    types:
      - closed
  workflow_dispatch:

jobs:
  cleanup:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3

      - name: Cleanup
        run: |
          gh extension install actions/gh-actions-cache

          REPO=${{ github.repository }}
          BRANCH=${{ github.ref }}

          echo "Fetching list of cache key"
          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )

          ## Setting this to not fail the workflow while deleting cache keys.
          set +e
          echo "Deleting caches..."
          for cacheKey in $cacheKeysForPR
          do
              gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
          done
          echo "Done"
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
</details>