Compare commits

...

84 Commits

SHA1 Message Date
fe1055e9d1 Release v1.1.1 2020-02-05 10:01:01 -05:00
fab26f3f4f Bump version to 1.1.1 2020-02-05 09:55:35 -05:00
4887979af8 proxy support (#166)
* Replace typed rest client with new http-client

* Send Content-Type: application/json and fix up some types

* Lint

* Consume @actions/http-client:1.0.5

* Consume @actions/http-client:1.0.6

* Don't send headers manually; http-client adds them automatically
2020-02-05 09:24:37 -05:00
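
As context for the proxy-support change above, here is a minimal sketch of a JSON request made with `@actions/http-client`; the URL and payload shape are illustrative, not the cache service's real contract. The client honors standard proxy environment variables such as `https_proxy` on its own, and its `postJson` helper sets `Content-Type: application/json`, which is why the commit stops sending headers manually.

```ts
import { HttpClient } from "@actions/http-client";

// Illustrative request/response shapes, not the cache service's actual contract.
interface ReserveCacheRequest {
    key: string;
}
interface ReserveCacheResponse {
    cacheId: number;
}

async function reserveCacheExample(): Promise<void> {
    // Proxy settings (https_proxy / HTTPS_PROXY) are picked up by the client itself.
    const client = new HttpClient("actions/cache-example");

    // postJson serializes the body and sets Content-Type: application/json.
    const response = await client.postJson<ReserveCacheResponse>(
        "https://example.invalid/_apis/artifactcache/caches", // placeholder URL
        { key: "Linux-node-abc123" } as ReserveCacheRequest
    );

    console.log(`status: ${response.statusCode}, cacheId: ${response.result?.cacheId}`);
}
```
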
f9c9166ecb Increase cache limit to 5 GBs (#168)
* Increase cache limit to 5 GBs

* Fix test to use new size limit

* Update src/save.ts

Co-Authored-By: Josh Gross <joshmgross@github.com>

Co-authored-by: Josh Gross <joshmgross@github.com>
2020-02-01 16:11:02 -05:00
23e301d35c Disable fail-fast to get full coverage of failures 2020-01-29 20:34:56 -05:00
e43776276f Add Swift Package Manager (SPM) example (#159)
* Add Swift - SPM to examples

* Add SPM example link to readme

* remove extra newline

* remove another extra newline
2020-01-29 11:13:59 -05:00
b6d538e2aa Add renv examples (#151)
* Add renv examples

* Add link in main readme.md
2020-01-21 19:22:40 -05:00
296374f6c9 Update action's description (#75)
* README: clarify case on the action

* Update description
2020-01-14 10:11:41 -05:00
6c11532937 Update Ruby docs. "Gem" -> "Bundler" (#150)
* Use "Bundler" which is the package manager

"Gem" isn't wrong, but not typically what a Ruby developer would think of.

* Update links

* Update links
2020-01-12 18:48:43 -05:00
c33bff8d72 Add Scala - SBT example (#134)
* Add Scala - SBT example

* Add Scala - SBT example to README
2020-01-10 17:09:06 -05:00
d1991bb4c5 Add Haskell - Cabal example (#148)
* Add Haskell - Cabal example

* Add link in main readme.md
2020-01-10 17:07:52 -05:00
60e292adf7 Update cache limits (#140) 2020-01-07 15:01:47 -05:00
a505c2e7a6 Merge branch 'master' into releases/v1 2020-01-06 14:10:16 -05:00
c262ac0154 Fix number parsing issues 2020-01-06 14:06:24 -05:00
10a14413e7 Update release binaries 2020-01-06 13:51:23 -05:00
cf4f44db70 Fix invalid array 2020-01-06 13:50:39 -05:00
4c4974aff1 Release v1.1 2020-01-06 13:36:33 -05:00
1da52de10f npm audit fix 2020-01-06 13:31:03 -05:00
b45d91cc4b Chunked Cache Upload APIs (#128)
* Initial pass at chunked upload apis

* Fix cacheEntry type

* Linting

* Fix download cache entry tests

* Linting tests

* Pull in fixes from testing branch

* Fix typo in ReserveCacheResponse

* Add test covering reserve cache failure

* Add retries to upload chunk

* PR feedback

* Format default chunk size

* Remove responses array
2020-01-06 13:05:50 -05:00
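
The chunked-upload commit above replaces a single archive upload with fixed-size chunks that are each retried on failure. The following is a rough, self-contained sketch of that pattern; the chunk size, retry count, and the `uploadChunk` stand-in are illustrative, not the action's actual API.

```ts
import * as fs from "fs";

// Illustrative values; the real action negotiates these with the cache service.
const CHUNK_SIZE = 32 * 1024 * 1024; // 32 MB
const MAX_RETRIES = 2;

// Stand-in for the HTTP call that sends one chunk (e.g. with a Content-Range header).
async function uploadChunk(buffer: Buffer, start: number, end: number): Promise<void> {
    // ... send `buffer` as bytes ${start}-${end} of the archive ...
}

async function uploadFileInChunks(archivePath: string): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    try {
        for (let offset = 0; offset < fileSize; offset += CHUNK_SIZE) {
            const length = Math.min(CHUNK_SIZE, fileSize - offset);
            const buffer = Buffer.alloc(length);
            fs.readSync(fd, buffer, 0, length, offset);

            // Retry each chunk a couple of times before giving up.
            let lastError: unknown;
            for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
                try {
                    await uploadChunk(buffer, offset, offset + length - 1);
                    lastError = undefined;
                    break;
                } catch (error) {
                    lastError = error;
                }
            }
            if (lastError) {
                throw lastError;
            }
        }
    } finally {
        fs.closeSync(fd);
    }
}
```
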
a631fadf14 README.md: fix grammar error (#136)
"it's" is short for "it is," but the use in this sentence is as a
possessive - something belonging to "it" - hence, "its" is correct.
2019-12-23 10:30:34 -05:00
e223b0a12d Merge pull request #124 from nogic1008/patch-1
Add Another C# Example to use personal cache folder
2019-12-16 10:24:25 -05:00
decbafc350 Update examples.md
Co-Authored-By: Chris Patterson <chrispat@github.com>
2019-12-16 09:45:29 +09:00
3854a40aee Use BSD tar on windows (#126)
* Use BSD tar on windows

* Linting

* Fallback to which tar if no system tar

* Fix formatting

* Bump prettier and typescript
2019-12-13 17:24:37 -05:00
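
A short sketch of the selection logic the commit above describes: prefer the BSD `tar.exe` that ships with Windows, and otherwise fall back to whatever `which tar` resolves. The function name is illustrative; the Windows path matches the one the tar tests later in this diff expect.

```ts
import * as fs from "fs";
import * as io from "@actions/io";

async function getTarPath(): Promise<string> {
    if (process.platform === "win32") {
        // Windows 10+ ships a BSD tar in System32; prefer it when present.
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (fs.existsSync(systemTar)) {
            return systemTar;
        }
    }
    // Fall back to the tar on PATH; the second argument makes this throw if none is found.
    return await io.which("tar", true);
}
```
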
0188dffc5a Revert original C# Example
* Treat "Use Personal Cache Folder" way as another C# example
* Describe the situation in which another example should be used
2019-12-13 10:03:43 +09:00
002d3a77f4 Use Personal Cache Folder in C# Example
Ref: #115
2019-12-10 09:21:47 +09:00
4809f4ada4 Add list of implementation examples. (#116)
Gives the examples more visibility by listing them on the main README: easier to see, better SEO.
2019-12-07 18:25:23 -05:00
3d01b4eb53 Update Ruby example in documentation to specify bundler path (#113)
* Update Ruby example to specify bundler path

* Fix spacing
2019-11-23 14:13:50 -05:00
cffae9552b Release v1.0.3 2019-11-21 14:57:29 -05:00
95c1798369 Remove validation failures and warning annotations (#108)
* Update warnings behavior

* Add void return type
2019-11-21 14:37:54 -05:00
639f9d8b81 Mask download URL in logs (#110) 2019-11-21 14:37:32 -05:00
d9fe1b81f9 Release 1.0.2 2019-11-19 11:55:11 -05:00
92ae3b63f8 Update badge link 2019-11-15 15:04:12 -05:00
44543250bd Release 1.0.2 2019-11-15 10:31:02 -05:00
6491e51b66 Merge master into releases/v1 2019-11-15 10:29:58 -05:00
84b3b283f0 Await io mkdirP (#100) 2019-11-15 10:25:57 -05:00
8d14a2150b Add unit tests for save (#98)
* Clean up args and arrange imports

* Arrange args in restore tests

* Add unit tests for save

* Use const instead of let (linting)
2019-11-14 17:14:16 -05:00
c0584c42d1 Add unit tests for actionUtils (#93)
* Add unit tests for actionUtils

* Fix file size on ubuntu and test name

* Remove unused import
2019-11-13 16:13:00 -05:00
bb828da54c Format cache size and display on info (#85) 2019-11-13 11:00:46 -05:00
7e7aef2963 Add pip examples (#86) 2019-11-13 10:55:05 -05:00
b7d83b4095 Provide better errors for unsupported event types (#68)
* Validate event type during restore

* PR Feedback

* Format

* Linting
2019-11-13 10:54:39 -05:00
50a2fdee6f Update yarn cache example (#70)
* Update yarn cache example

* Update examples.md

Co-Authored-By: Eric Taylor <erictaylor89@gmail.com>
2019-11-13 10:18:47 -05:00
f0cbadd748 Use cache in workflows (#90) 2019-11-12 17:48:19 -05:00
4657a5f525 Fix lint on Windows (#89) 2019-11-12 17:01:15 -05:00
fb50aa45ec Add initial eslint setup (#88) 2019-11-12 16:48:02 -05:00
31508256ff Update README.md (#76) 2019-11-12 15:33:22 -05:00
bc821d0c12 Remove recommendation to cache node_modules (#69)
* Update npm caching examples

* Fix output name

* Remove extra details tag
2019-11-07 21:04:46 -05:00
bde557aefd Fix PR filters 2019-11-07 20:02:06 -05:00
4b0709a0d5 Add unit tests for restore (#62)
* Move archive file size to utils

* Disable net connect with nock

* Add unit tests for restore

* Fix test names and test URL
2019-11-06 13:41:45 -05:00
ecf6eea708 Add PHP Composer example (#32) 2019-11-05 16:18:49 -05:00
86dff562ab v1.0.1 release binaries 2019-11-05 15:43:33 -05:00
0f810ad45a Release v1.0.1 2019-11-05 15:42:18 -05:00
eb10706a9d Bump version to v1.0.1 and audit fix 2019-11-05 15:40:20 -05:00
30524a6fbd Tweak 'Cache not found' message (#54)
Previously the message was like this:

```
Cache not found for input keys: ["xxx",""]
```

Note the empty entry at the end: `String.prototype.split` produces an array
containing a single empty string when there is nothing to split.

Now it looks like:

```
Cache not found for input keys: xxx
```
2019-11-05 15:33:41 -05:00
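
To make the behavior described above concrete, a small illustrative snippet (variable names are made up): splitting an empty `restore-keys` input yields `[""]` rather than an empty array, and filtering out empty entries before joining the keys produces the new message.

```ts
// Primary key "xxx"; the restore-keys input is empty.
const restoreKeysInput = "";
const restoreKeys = restoreKeysInput.split("\n"); // [""] (one empty string, not an empty array)

const oldStyle = ["xxx", ...restoreKeys];
console.log(`Cache not found for input keys: ${JSON.stringify(oldStyle)}`);
// Cache not found for input keys: ["xxx",""]

const keys = ["xxx", ...restoreKeys.filter(key => key.length > 0)];
console.log(`Cache not found for input keys: ${keys.join(", ")}`);
// Cache not found for input keys: xxx
```
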
b034b26a44 Bump cache limit to 400MB (#61) 2019-11-05 15:24:22 -05:00
e1ed41a9c9 Link to docs (#58)
* Link to docs

* Attempt to default to user's browser language first
2019-11-05 14:09:13 -05:00
5f4d4d4555 Alphabetise examples (#52) 2019-11-05 12:04:07 -05:00
5d3ad75a2b Update example formatting (#57)
* adjust formatting of Carthage example

* enable syntax highlighting for Cargo example
2019-11-05 11:03:56 -05:00
d8c5e69fe2 Update badge to filter to master push events 2019-11-05 10:59:26 -05:00
f66a56e59e Bump version to v1 (#51) 2019-11-04 16:40:33 -05:00
9d8c7b4041 Release v1 2019-11-04 15:13:15 -05:00
21f72b1fcc Bump package version 2019-11-04 12:04:13 -05:00
ce4a52af49 Stop warning when cache is not found (#40)
The cache not being found is a common situation, so a very visible warning
is a little too much.
2019-11-04 11:03:18 -05:00
57f889e86e Add cargo example for Rust project (#8)
* Add cargo example

* Add hash of Cargo.lock to keys of caches

* Move Rust example to examples.md
2019-11-04 10:15:02 -05:00
8c4c641fa0 Add Elixir Mix example (#42)
* Add Elixir Mix example

* Fix typo
2019-11-03 22:44:06 -05:00
fe98aa6782 Fix repo name in contact email (#41) 2019-11-03 09:38:45 -05:00
87c0185d27 Add Go modules example (#18)
* Add Go modules example

* Fix TOC
2019-11-01 15:27:43 -04:00
25e0c8faac Remove cache checksum debug - close #24 (#26)
* Remove cache checksum debug - close #24

* Remove cache checksum debug on save

* Fix formatting
2019-11-01 15:24:11 -04:00
d384987622 Time based eviction interval is 1 week (#34) 2019-11-01 14:20:27 -04:00
7058277028 Fix typo in error message (#29) 2019-11-01 10:35:38 -04:00
2523667574 Add note about time-based eviction to README (#30) 2019-11-01 10:35:10 -04:00
0e86554410 Ignore all .md files 2019-11-01 10:22:36 -04:00
aae1376422 Exclude documentation from CI tests (#28) 2019-10-31 17:22:35 -04:00
fc310d4670 Fix README.md (#25)
`restore-keys` had incorrect indentation.
2019-10-31 17:19:43 -04:00
287ee0671b Add trailing dash to Maven fallback key (#19) 2019-10-31 15:08:09 -04:00
c401b561b2 Link to NuGet lock files documentation (#20) 2019-10-31 15:06:47 -04:00
526c940a94 Prevent commands from executing during tests (#21)
* Prevent commands from executing during tests

* Add newline at end of file

* Drop all commands from output
2019-10-31 15:05:59 -04:00
d676b6c354 Update README.md 2019-10-31 13:01:04 -04:00
6be35d19ef Minor typo in README (#15)
from `steps.[ID].outupts.cache-hit` to `steps.[ID].outputs.cache-hit`
2019-10-31 12:06:17 -04:00
3743276c66 Move examples to their own page (#13) 2019-10-31 11:36:50 -04:00
6e37fd16ea Add Carthage example (#10) 2019-10-31 10:52:32 -04:00
83bb08ded3 Add Cocoapods example (#5) 2019-10-31 10:47:43 -04:00
7611296bb3 Add Ruby Gem example (#4) 2019-10-31 10:45:47 -04:00
889c603691 Update workflow (#1)
* Run workflow on linux, mac, and windows

* Add status badge

* Use npm install instead

* Bump typescript version

* Use node 12.x
2019-10-31 10:37:00 -04:00
b2cac08cff Create CODE_OF_CONDUCT.md 2019-10-30 15:34:55 -04:00
26 changed files with 9365 additions and 333 deletions

.eslintrc.json (new file, 16 lines)

@@ -0,0 +1,16 @@
{
"env": { "node": true, "jest": true },
"parser": "@typescript-eslint/parser",
"parserOptions": { "ecmaVersion": 2020, "sourceType": "module" },
"extends": [
"eslint:recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"plugin:import/errors",
"plugin:import/warnings",
"plugin:import/typescript",
"plugin:prettier/recommended",
"prettier/@typescript-eslint"
],
"plugins": ["@typescript-eslint", "jest"]
}


@@ -1,20 +1,54 @@
name: Test Cache Action
name: Tests
on:
pull_request:
branches:
- master
paths-ignore:
- '**.md'
push:
branches:
- master
paths-ignore:
- '**.md'
jobs:
test:
runs-on: ubuntu-latest
name: Test on ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: '12.x'
- name: Get npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
- run: npm ci
- name: Prettier Format Check
run: npm run format-check
- name: ESLint Check
run: npm run lint
- name: Build & Test
run: npm run test

.gitignore (vendored, 3 lines changed)

@@ -94,3 +94,6 @@ typings/
# DynamoDB Local files
.dynamodb/
# Text editor files
.vscode/

CODE_OF_CONDUCT.md (new file, 76 lines)

@@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at opensource+actions/cache@github.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

README.md (112 lines changed)

@@ -1,6 +1,12 @@
# cache
This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
This action allows caching dependencies and build outputs to improve workflow execution time.
<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>
## Documentation
See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
## Usage
@@ -22,87 +28,59 @@ Create a workflow `.yml` file in your repositories `.github/workflows` directory
### Example workflow
```yaml
name: Example Caching with npm
name: Caching Primes
on: push
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- name: Cache node_modules
uses: actions/cache@preview
- name: Cache Primes
id: cache-primes
uses: actions/cache@v1
with:
path: node_modules
key: ${{ runner.os }}-node
path: prime-numbers
key: ${{ runner.os }}-primes
- name: Install Dependencies
run: npm install
- name: Build
run: npm run build
- name: Generate Prime Numbers
if: steps.cache-primes.outputs.cache-hit != 'true'
run: /generate-primes.sh -d prime-numbers
- name: Test
run: npm run test
```
## Ecosystem Examples
### Node - npm
```yaml
- uses: actions/cache@preview
with:
path: node_modules
key: ${{ runner.os }}-node
- name: Use Prime Numbers
run: /primes.sh -d prime-numbers
```
### Node - Yarn
## Implementation Examples
```yaml
- uses: actions/cache@preview
with:
path: ~/.cache/yarn
key: ${{ runner.os }}-yarn-${{ hashFiles(format('{0}{1}', github.workspace, '/yarn.lock')) }}
restore-keys: |
${{ runner.os }}-yarn-
```
Every programming language and framework has its own way of caching.
### C# - Nuget
See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
```yaml
- uses: actions/cache@preview
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
### Java - Gradle
```yaml
- uses: actions/cache@preview
with:
path: ~/.gradle/caches
key: gradle-${{ runner.os }}-${{ hashFiles('**/*.gradle') }}
restore-keys: |
gradle-${{ runner.os }}-
```
### Java - Maven
```yaml
- uses: actions/cache@preview
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven
```
- [C# - Nuget](./examples.md#c---nuget)
- [Elixir - Mix](./examples.md#elixir---mix)
- [Go - Modules](./examples.md#go---modules)
- [Haskell - Cabal](./examples.md#haskell---cabal)
- [Java - Gradle](./examples.md#java---gradle)
- [Java - Maven](./examples.md#java---maven)
- [Node - npm](./examples.md#node---npm)
- [Node - Yarn](./examples.md#node---yarn)
- [PHP - Composer](./examples.md#php---composer)
- [Python - pip](./examples.md#python---pip)
- [R - renv](./examples.md#r---renv)
- [Ruby - Bundler](./examples.md#ruby---bundler)
- [Rust - Cargo](./examples.md#rust---cargo)
- [Scala - SBT](./examples.md#scala---sbt)
- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)
## Cache Limits
Individual caches are limited to 200MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed.
A repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
## Skipping steps based on cache-hit
@@ -113,21 +91,21 @@ Example:
steps:
- uses: actions/checkout@v1
- uses: actions/cache@preview
- uses: actions/cache@v1
id: cache
with:
path: path/to/dependencies
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles')}}
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: /install.sh
```
> Note: The `id` defined in `actions/cache` must match the `id` in the `if` statement (i.e. `steps.[ID].outupts.cache-hit`)
> Note: The `id` defined in `actions/cache` must match the `id` in the `if` statement (i.e. `steps.[ID].outputs.cache-hit`)
## Contributing
We would love for you to contribute to `@actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.
## License
The scripts and documentation in this project are released under the [MIT License](LICENSE)
The scripts and documentation in this project are released under the [MIT License](LICENSE)


@@ -0,0 +1 @@
hello world

@@ -0,0 +1,236 @@
import * as core from "@actions/core";
import * as os from "os";
import * as path from "path";
import { Events, Outputs, State } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import * as actionUtils from "../src/utils/actionUtils";
jest.mock("@actions/core");
jest.mock("os");
afterEach(() => {
delete process.env[Events.Key];
});
test("getArchiveFileSize returns file size", () => {
const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
const size = actionUtils.getArchiveFileSize(filePath);
expect(size).toBe(11);
});
test("isExactKeyMatch with undefined cache entry returns false", () => {
const key = "linux-rust";
const cacheEntry = undefined;
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with empty cache entry returns false", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with different keys returns false", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with different key accents returns false", () => {
const key = "linux-áccent";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-accent"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with same key returns true", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
});
test("isExactKeyMatch with same key and different casing returns true", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "LINUX-RUST"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
});
test("setOutputAndState with undefined entry to set cache-hit output", () => {
const key = "linux-rust";
const cacheEntry = undefined;
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledTimes(0);
});
test("setOutputAndState with exact match to set cache-hit output and state", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust"
};
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledWith(
State.CacheResult,
JSON.stringify(cacheEntry)
);
expect(saveStateMock).toHaveBeenCalledTimes(1);
});
test("setOutputAndState with no exact match to set cache-hit output and state", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43"
};
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledWith(
State.CacheResult,
JSON.stringify(cacheEntry)
);
expect(saveStateMock).toHaveBeenCalledTimes(1);
});
test("getCacheState with no state returns undefined", () => {
const getStateMock = jest.spyOn(core, "getState");
getStateMock.mockImplementation(() => {
return "";
});
const state = actionUtils.getCacheState();
expect(state).toBe(undefined);
expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
expect(getStateMock).toHaveBeenCalledTimes(1);
});
test("getCacheState with valid state", () => {
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
const getStateMock = jest.spyOn(core, "getState");
getStateMock.mockImplementation(() => {
return JSON.stringify(cacheEntry);
});
const state = actionUtils.getCacheState();
expect(state).toEqual(cacheEntry);
expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
expect(getStateMock).toHaveBeenCalledTimes(1);
});
test("logWarning logs a message with a warning prefix", () => {
const message = "A warning occurred.";
const infoMock = jest.spyOn(core, "info");
actionUtils.logWarning(message);
expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`);
});
test("isValidEvent returns false for unknown event", () => {
const event = "foo";
process.env[Events.Key] = event;
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(false);
});
test("resolvePath with no ~ in path", () => {
const filePath = ".cache/yarn";
const resolvedPath = actionUtils.resolvePath(filePath);
const expectedPath = path.resolve(filePath);
expect(resolvedPath).toBe(expectedPath);
});
test("resolvePath with ~ in path", () => {
const filePath = "~/.cache/yarn";
const homedir = jest.requireActual("os").homedir();
const homedirMock = jest.spyOn(os, "homedir");
homedirMock.mockImplementation(() => {
return homedir;
});
const resolvedPath = actionUtils.resolvePath(filePath);
const expectedPath = path.join(homedir, ".cache/yarn");
expect(resolvedPath).toBe(expectedPath);
});
test("resolvePath with home not found", () => {
const filePath = "~/.cache/yarn";
const homedirMock = jest.spyOn(os, "homedir");
homedirMock.mockImplementation(() => {
return "";
});
expect(() => actionUtils.resolvePath(filePath)).toThrow(
"Unable to resolve `~` to HOME"
);
});
test("isValidEvent returns true for push event", () => {
const event = Events.Push;
process.env[Events.Key] = event;
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(true);
});
test("isValidEvent returns true for pull request event", () => {
const event = Events.PullRequest;
process.env[Events.Key] = event;
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(true);
});
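
The tests above pin down the expected behavior of several helpers in `src/utils/actionUtils`. As one example, a minimal `resolvePath` consistent with the `~`-expansion tests (a sketch, not necessarily the file's exact implementation):

```ts
import * as os from "os";
import * as path from "path";

// Expand a leading "~" to the user's home directory, matching the tests above:
// a plain relative path is resolved against the cwd, and a missing HOME throws.
export function resolvePath(filePath: string): string {
    if (filePath[0] === "~") {
        const home = os.homedir();
        if (!home) {
            throw new Error("Unable to resolve `~` to HOME");
        }
        return path.join(home, filePath.slice(1));
    }
    return path.resolve(filePath);
}
```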

@@ -1,22 +0,0 @@
import * as core from "@actions/core";
import { Inputs } from "../src/constants";
import run from "../src/restore";
import * as testUtils from "../src/utils/testUtils";
test("restore with no path", async () => {
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: path"
);
});
test("restore with no key", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: key"
);
});

__tests__/restore.test.ts (new file, 403 lines)

@@ -0,0 +1,403 @@
import * as core from "@actions/core";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/restore";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
return path.resolve(filePath);
});
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isExactKeyMatch(key, cacheResult);
}
);
jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isValidEvent();
});
jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getSupportedEvents();
});
});
beforeEach(() => {
process.env[Events.Key] = Events.Push;
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
});
test("restore with invalid event outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with no path should fail", async () => {
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: path"
);
});
test("restore with no key", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: key"
);
});
test("restore with too many keys should fail", async () => {
const key = "node-test";
const restoreKeys = [...Array(20).keys()].map(x => x.toString());
testUtils.setInputs({
path: "node_modules",
key,
restoreKeys
});
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: Keys are limited to a maximum of 10.`
);
});
test("restore with large key should fail", async () => {
const key = "foo".repeat(512); // Over the 512 character limit
testUtils.setInputs({
path: "node_modules",
key
});
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
);
});
test("restore with invalid key should fail", async () => {
const key = "comma,comma";
testUtils.setInputs({
path: "node_modules",
key
});
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot contain commas.`
);
});
test("restore with no cache found", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
key
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
return Promise.resolve(null);
});
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}.`
);
});
test("restore with server error should fail", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
key
});
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
throw new Error("HTTP Error Occurred");
});
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with restore keys and no cache found", async () => {
const key = "node-test";
const restoreKey = "node-";
testUtils.setInputs({
path: "node_modules",
key,
restoreKeys: [restoreKey]
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
return Promise.resolve(null);
});
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}, ${restoreKey}.`
);
});
test("restore with cache found", async () => {
const key = "node-test";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 142;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with a pull request event and cache found", async () => {
const key = "node-test";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key
});
process.env[Events.Key] = Events.PullRequest;
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 62915000;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with cache found for restore key", async () => {
const key = "node-test";
const restoreKey = "node-";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key,
restoreKeys: [restoreKey]
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: restoreKey,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 142;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(infoMock).toHaveBeenCalledWith(
`Cache restored from key: ${restoreKey}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});

__tests__/save.test.ts (new file, 378 lines)

@@ -0,0 +1,378 @@
import * as core from "@actions/core";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/save";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("@actions/core");
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
return jest.requireActual("@actions/core").getInput(name, options);
});
jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => {
return jest.requireActual("../src/utils/actionUtils").getCacheState();
});
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
return jest
.requireActual("../src/utils/actionUtils")
.isExactKeyMatch(key, cacheResult);
}
);
jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isValidEvent();
});
jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getSupportedEvents();
});
jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
return path.resolve(filePath);
});
jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
return Promise.resolve("/foo/bar");
});
});
beforeEach(() => {
process.env[Events.Key] = Events.Push;
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
});
test("save with invalid event outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with no primary key in state outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return "";
});
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Error retrieving key from state.`
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with exact match returns early", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: primaryKey,
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const createTarMock = jest.spyOn(tar, "createTar");
await run();
expect(infoMock).toHaveBeenCalledWith(
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with missing input outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
await run();
expect(logWarningMock).toHaveBeenCalledWith(
"Input required and not supplied: path"
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with large cache outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath);
const createTarMock = jest.spyOn(tar, "createTar");
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
return cacheSize;
});
await run();
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith(
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with reserve cache failure outputs warning", async () => {
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(-1);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
expect(infoMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with server error outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest
.spyOn(cacheHttpClient, "saveCache")
.mockImplementationOnce(() => {
throw new Error("HTTP Error Occurred");
});
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with valid inputs uploads a cache", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
expect(failedMock).toHaveBeenCalledTimes(0);
});

__tests__/tar.test.ts (new file, 58 lines)

@@ -0,0 +1,58 @@
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as tar from "../src/tar";
jest.mock("@actions/exec");
jest.mock("@actions/io");
beforeAll(() => {
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
});
});
test("extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const archivePath = "cache.tar";
const targetDirectory = "~/.npm/cache";
await tar.extractTar(archivePath, targetDirectory);
expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
const IS_WINDOWS = process.platform === "win32";
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-xz",
"-f",
archivePath,
"-C",
targetDirectory
]);
});
test("create tar", async () => {
const execMock = jest.spyOn(exec, "exec");
const archivePath = "cache.tar";
const sourceDirectory = "~/.npm/cache";
await tar.createTar(archivePath, sourceDirectory);
const IS_WINDOWS = process.platform === "win32";
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-cz",
"-f",
archivePath,
"-C",
sourceDirectory,
"."
]);
});
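
The tar tests above fix the expected commands; below is a sketch of `src/tar.ts` consistent with them (simplified, and the real module may differ in details such as the System32 existence check):

```ts
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as fs from "fs";

async function getTarPath(): Promise<string> {
    // Prefer the BSD tar shipped with Windows, otherwise fall back to a PATH lookup.
    if (process.platform === "win32") {
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (fs.existsSync(systemTar)) {
            return systemTar;
        }
    }
    return await io.which("tar", true);
}

export async function extractTar(archivePath: string, targetDirectory: string): Promise<void> {
    // Create the destination directory, then extract the gzipped archive into it.
    await io.mkdirP(targetDirectory);
    const tarPath = await getTarPath();
    await exec.exec(`"${tarPath}"`, ["-xz", "-f", archivePath, "-C", targetDirectory]);
}

export async function createTar(archivePath: string, sourceDirectory: string): Promise<void> {
    // Archive the contents of the source directory ("." relative to -C).
    const tarPath = await getTarPath();
    await exec.exec(`"${tarPath}"`, ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]);
}
```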

@@ -1,5 +1,5 @@
name: 'Cache'
description: 'Cache dependencies and build outputs to improve workflow execution time'
name: 'Cache Artifacts'
description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
path:
@@ -21,4 +21,4 @@ runs:
post-if: 'success()'
branding:
icon: 'archive'
color: 'gray-dark'
color: 'gray-dark'

dist/restore/index.js (vendored, new file, 3091 lines; diff suppressed because it is too large)

dist/save/index.js (vendored, new file, 3072 lines; diff suppressed because it is too large)

examples.md (new file, 387 lines)

@@ -0,0 +1,387 @@
# Examples
- [C# - NuGet](#c---nuget)
- [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules)
- [Haskell - Cabal](#haskell---cabal)
- [Java - Gradle](#java---gradle)
- [Java - Maven](#java---maven)
- [Node - npm](#node---npm)
- [Node - Yarn](#node---yarn)
- [PHP - Composer](#php---composer)
- [Python - pip](#python---pip)
- [R - renv](#r---renv)
- [Ruby - Bundler](#ruby---bundler)
- [Rust - Cargo](#rust---cargo)
- [Scala - SBT](#scala---sbt)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](#swift---swift-package-manager)
## C# - NuGet
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
```yaml
- uses: actions/cache@v1
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
Depending on the environment, huge packages might be pre-installed in the global cache folder.
If you do not want to include them, consider moving the cache folder as shown below.
>Note: This workflow does not work for projects that require files to be placed in the user profile package folder
```yaml
env:
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
- uses: actions/cache@v1
with:
path: ${{ github.workspace }}/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
## Elixir - Mix
```yaml
- uses: actions/cache@v1
with:
path: deps
key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
restore-keys: |
${{ runner.os }}-mix-
```
## Go - Modules
```yaml
- uses: actions/cache@v1
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
```
## Haskell - Cabal
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
```yaml
- uses: actions/cache@v1
name: Cache ~/.cabal/packages
with:
path: ~/.cabal/packages
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
name: Cache ~/.cabal/store
with:
path: ~/.cabal/store
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
name: Cache dist-newstyle
with:
path: dist-newstyle
key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
```
## Java - Gradle
```yaml
- uses: actions/cache@v1
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
restore-keys: |
${{ runner.os }}-gradle-
```
## Java - Maven
```yaml
- uses: actions/cache@v1
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
```
## Node - npm
For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` on Windows. See https://docs.npmjs.com/cli/cache#cache
>Note: It is not recommended to cache `node_modules`, as it can break across Node versions and won't work with `npm ci`
### macOS and Ubuntu
```yaml
- uses: actions/cache@v1
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
### Windows
```yaml
- uses: actions/cache@v1
with:
path: ~\AppData\Roaming\npm-cache
key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
### Using multiple systems and `npm config`
```yaml
- name: Get npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
## Node - Yarn
The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info.
```yaml
- name: Get yarn cache
id: yarn-cache
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v1
with:
path: ${{ steps.yarn-cache.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
```
## PHP - Composer
```yaml
- name: Get Composer Cache Directory
id: composer-cache
run: |
echo "::set-output name=dir::$(composer config cache-files-dir)"
- uses: actions/cache@v1
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: |
${{ runner.os }}-composer-
```
## Python - pip
For pip, the cache directory will vary by OS. See https://pip.pypa.io/en/stable/reference/pip_install/#caching
Locations:
- Ubuntu: `~/.cache/pip`
- Windows: `~\AppData\Local\pip\Cache`
- macOS: `~/Library/Caches/pip`
### Simple example
```yaml
- uses: actions/cache@v1
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Caches/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\pip\Cache
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
### Using a script to get cache location
> Note: This uses an internal pip API and may not always work.
```yaml
- name: Get pip cache
id: pip-cache
run: |
python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
- uses: actions/cache@v1
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
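If the internal API moves in a future pip release, the step above will fail. A hedged variant (a sketch, assuming a POSIX shell on a Linux or macOS runner) falls back to the documented Ubuntu default instead:
```yaml
- name: Get pip cache
  id: pip-cache
  run: |
    # Fall back to the documented default location if the internal pip API is unavailable
    dir=$(python -c "from pip._internal.locations import USER_CACHE_DIR; print(USER_CACHE_DIR)" || echo "$HOME/.cache/pip")
    echo "::set-output name=dir::$dir"
```
The `dir` output is then passed to `actions/cache` exactly as in the example above.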
## R - renv
For renv, the cache directory will vary by OS. See https://rstudio.github.io/renv/articles/renv.html#cache
Locations:
- Ubuntu: `~/.local/share/renv`
- macOS: `~/Library/Application Support/renv`
- Windows: `%LOCALAPPDATA%/renv`
### Simple example
```yaml
- uses: actions/cache@v1
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OSes in a workflow
```yaml
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Application Support/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
## Ruby - Bundler
```yaml
- uses: actions/cache@v1
with:
path: vendor/bundle
key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: |
${{ runner.os }}-gems-
```
When dependencies are installed later in the workflow, Bundler must be configured to use the same path:
```yaml
- name: Bundle install
run: |
bundle config path vendor/bundle
bundle install --jobs 4 --retry 3
```
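Alternatively, the same setting can be supplied through Bundler's `BUNDLE_PATH` environment variable rather than a `bundle config` call; a sketch (verify against the Bundler version in use):
```yaml
- name: Bundle install
  env:
    BUNDLE_PATH: vendor/bundle
  run: bundle install --jobs 4 --retry 3
```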
## Rust - Cargo
```yaml
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@v1
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```
## Scala - SBT
```yaml
- name: Cache SBT ivy cache
uses: actions/cache@v1
with:
path: ~/.ivy2/cache
key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
uses: actions/cache@v1
with:
path: ~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```
## Swift, Objective-C - Carthage
```yaml
- uses: actions/cache@v1
with:
path: Carthage
key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }}
restore-keys: |
${{ runner.os }}-carthage-
```
## Swift, Objective-C - CocoaPods
```yaml
- uses: actions/cache@v1
with:
path: Pods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
restore-keys: |
${{ runner.os }}-pods-
```
## Swift - Swift Package Manager
```yaml
- uses: actions/cache@v1
with:
path: .build
key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
restore-keys: |
${{ runner.os }}-spm-
```

jest.config.js
@@ -1,11 +1,23 @@
require("nock").disableNetConnect();
module.exports = {
clearMocks: true,
moduleFileExtensions: ['js', 'ts'],
testEnvironment: 'node',
testMatch: ['**/*.test.ts'],
testRunner: 'jest-circus/runner',
transform: {
'^.+\\.ts$': 'ts-jest'
},
verbose: true
}
clearMocks: true,
moduleFileExtensions: ["js", "ts"],
testEnvironment: "node",
testMatch: ["**/*.test.ts"],
testRunner: "jest-circus/runner",
transform: {
"^.+\\.ts$": "ts-jest"
},
verbose: true
};
const processStdoutWrite = process.stdout.write.bind(process.stdout);
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
if (!str.match(/^::/)) {
return processStdoutWrite(str, encoding, cb);
}
};

package-lock.json (generated): 1083 changed lines; diff suppressed because it is too large

package.json
@@ -1,12 +1,13 @@
{
"name": "cache",
"version": "0.0.1",
"version": "1.1.1",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
"scripts": {
"build": "tsc",
"test": "tsc --noEmit && jest --coverage",
"lint": "eslint **/*.ts --cache",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/"
@@ -25,19 +26,28 @@
"dependencies": {
"@actions/core": "^1.2.0",
"@actions/exec": "^1.0.1",
"@actions/http-client": "^1.0.6",
"@actions/io": "^1.0.1",
"typed-rest-client": "^1.5.0",
"uuid": "^3.3.3"
},
"devDependencies": {
"@types/jest": "^24.0.13",
"@types/nock": "^11.1.0",
"@types/node": "^12.0.4",
"@types/uuid": "^3.4.5",
"@typescript-eslint/eslint-plugin": "^2.7.0",
"@typescript-eslint/parser": "^2.7.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.6.0",
"eslint-config-prettier": "^6.5.0",
"eslint-plugin-import": "^2.18.2",
"eslint-plugin-jest": "^23.0.3",
"eslint-plugin-prettier": "^3.1.1",
"jest": "^24.8.0",
"jest-circus": "^24.7.1",
"prettier": "1.18.2",
"nock": "^11.7.0",
"prettier": "^1.19.1",
"ts-jest": "^24.0.2",
"typescript": "^3.5.1"
"typescript": "^3.7.3"
}
}

src/cacheHttpClient.ts
@@ -1,58 +1,108 @@
import * as core from "@actions/core";
import * as fs from "fs";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import { HttpClient, HttpCodes } from "@actions/http-client";
import {
IHttpClientResponse,
IRequestOptions,
ITypedResponse
} from "@actions/http-client/interfaces";
import {
ArtifactCacheEntry,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";
import { BearerCredentialHandler } from "typed-rest-client/Handlers";
import { HttpClient } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import { RestClient, IRequestOptions } from "typed-rest-client/RestClient";
function isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300;
}
import { ArtifactCacheEntry } from "./contracts";
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
export async function getCacheEntry(
keys: string[]
): Promise<ArtifactCacheEntry> {
const cacheUrl = getCacheUrl();
function getCacheApiUrl(resource: string): string {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = (
process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
""
).replace("pipelines", "artifactcache");
if (!baseUrl) {
throw new Error(
"Cache Service Url not found, unable to restore cache."
);
}
const url = `${baseUrl}_apis/artifactcache/${resource}`;
core.debug(`Resource Url: ${url}`);
return url;
}
function createAcceptHeader(type: string, apiVersion: string): string {
return `${type};api-version=${apiVersion}`;
}
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
headers: {
Accept: createAcceptHeader("application/json", "6.0-preview.1")
}
};
return requestOptions;
}
function createHttpClient(): HttpClient {
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(
keys.join(",")
)}`;
const restClient = new RestClient("actions/cache", cacheUrl, [
bearerCredentialHandler
]);
const response = await restClient.get<ArtifactCacheEntry>(
resource,
return new HttpClient(
"actions/cache",
[bearerCredentialHandler],
getRequestOptions()
);
if (response.statusCode === 204) {
throw new Error(
`Cache not found for input keys: ${JSON.stringify(keys)}.`
);
}
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
if (!cacheResult || !cacheResult.archiveLocation) {
throw new Error("Cache not found.");
}
return cacheResult;
}
export async function downloadCache(
cacheEntry: ArtifactCacheEntry,
archivePath: string
): Promise<void> {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache");
const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!);
await pipeResponseToStream(downloadResponse, stream);
export async function getCacheEntry(
keys: string[]
): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient();
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
const response = await httpClient.getJson<ArtifactCacheEntry>(
getCacheApiUrl(resource)
);
if (response.statusCode === 204) {
return null;
}
if (!isSuccessStatusCode(response.statusCode)) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult?.archiveLocation;
if (!cacheDownloadUrl) {
throw new Error("Cache not found.");
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
return cacheResult;
}
async function pipeResponseToStream(
@@ -66,61 +116,183 @@ async function pipeResponseToStream(
});
}
export async function saveCache(stream: NodeJS.ReadableStream, key: string) {
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
export async function downloadCache(
archiveLocation: string,
archivePath: string
): Promise<void> {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache");
const downloadResponse = await httpClient.get(archiveLocation);
await pipeResponseToStream(downloadResponse, stream);
}
const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
const postUrl = cacheUrl + resource;
// Reserve Cache
export async function reserveCache(key: string): Promise<number> {
const httpClient = createHttpClient();
const restClient = new RestClient("actions/cache", undefined, [
bearerCredentialHandler
]);
const reserveCacheRequest: ReserveCacheRequest = {
key
};
const response = await httpClient.postJson<ReserveCacheResponse>(
getCacheApiUrl("caches"),
reserveCacheRequest
);
return response?.result?.cacheId ?? -1;
}
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream"
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
async function uploadChunk(
httpClient: HttpClient,
resourceUrl: string,
data: NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
core.debug(
`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
);
const additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
};
const response = await restClient.uploadStream<void>(
"POST",
postUrl,
stream,
requestOptions
const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
return await httpClient.sendStream(
"PATCH",
resourceUrl,
data,
additionalHeaders
);
};
const response = await uploadChunkRequest();
if (isSuccessStatusCode(response.message.statusCode)) {
return;
}
if (isRetryableStatusCode(response.message.statusCode)) {
core.debug(
`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
);
const retryResponse = await uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.message.statusCode)) {
return;
}
}
throw new Error(
`Cache service responded with ${response.message.statusCode} during chunk upload.`
);
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
function parseEnvNumber(key: string): number | undefined {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
async function uploadFile(
httpClient: HttpClient,
cacheId: number,
archivePath: string
): Promise<void> {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, "r");
const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE =
parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize = Math.min(
fileSize - offset,
MAX_CHUNK_SIZE
);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
await uploadChunk(
httpClient,
resourceUrl,
chunk,
start,
end
);
}
})
);
} finally {
fs.closeSync(fd);
}
return;
}
async function commitCache(
httpClient: HttpClient,
cacheId: number,
filesize: number
): Promise<ITypedResponse<null>> {
const commitCacheRequest: CommitCacheRequest = { size: filesize };
return await httpClient.postJson<null>(
getCacheApiUrl(`caches/${cacheId.toString()}`),
commitCacheRequest
);
}
export async function saveCache(
cacheId: number,
archivePath: string
): Promise<void> {
const httpClient = createHttpClient();
core.debug("Upload cache");
await uploadFile(httpClient, cacheId, archivePath);
// Commit Cache
core.debug("Commiting cache");
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = await commitCache(
httpClient,
cacheId,
cacheSize
);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
);
}
core.info("Cache saved successfully");
}
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
};
return requestOptions;
}
function createAcceptHeader(type: string, apiVersion: string): string {
return `${type};api-version=${apiVersion}`;
}
function getCacheUrl(): string {
// Ideally we just use ACTIONS_CACHE_URL
let cacheUrl: string = (
process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
""
).replace("pipelines", "artifactcache");
if (!cacheUrl) {
throw new Error(
"Cache Service Url not found, unable to restore cache."
);
}
core.debug(`Cache Url: ${cacheUrl}`);
return cacheUrl;
}

src/constants.ts
@@ -1,14 +1,20 @@
export namespace Inputs {
export const Key = "key";
export const Path = "path";
export const RestoreKeys = "restore-keys";
export enum Inputs {
Key = "key",
Path = "path",
RestoreKeys = "restore-keys"
}
export namespace Outputs {
export const CacheHit = "cache-hit";
export enum Outputs {
CacheHit = "cache-hit"
}
export namespace State {
export const CacheKey = "CACHE_KEY";
export const CacheResult = "CACHE_RESULT";
export enum State {
CacheKey = "CACHE_KEY",
CacheResult = "CACHE_RESULT"
}
export enum Events {
Key = "GITHUB_EVENT_NAME",
Push = "push",
PullRequest = "pull_request"
}

src/contracts.d.ts (vendored): 13 changed lines
@@ -4,3 +4,16 @@ export interface ArtifactCacheEntry {
creationTime?: string;
archiveLocation?: string;
}
export interface CommitCacheRequest {
size: number;
}
export interface ReserveCacheRequest {
key: string;
version?: string;
}
export interface ReserveCacheResponse {
cacheId: number;
}

src/restore.ts
@@ -1,18 +1,25 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as fs from "fs";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Inputs, State } from "./constants";
import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run() {
async function run(): Promise<void> {
try {
// Validate inputs, this can cause task failure
let cachePath = utils.resolvePath(
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`
);
return;
}
const cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true })
);
core.debug(`Cache Path: ${cachePath}`);
@@ -20,7 +27,10 @@ async function run() {
const primaryKey = core.getInput(Inputs.Key, { required: true });
core.saveState(State.CacheKey, primaryKey);
const restoreKeys = core.getInput(Inputs.RestoreKeys).split("\n");
const restoreKeys = core
.getInput(Inputs.RestoreKeys)
.split("\n")
.filter(x => x !== "");
const keys = [primaryKey, ...restoreKeys];
core.debug("Resolved Keys:");
@@ -49,41 +59,37 @@
}
try {
let archivePath = path.join(
const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
if (!cacheEntry?.archiveLocation) {
core.info(
`Cache not found for input keys: ${keys.join(", ")}.`
);
return;
}
const archivePath = path.join(
await utils.createTempDirectory(),
"cache.tgz"
);
core.debug(`Archive Path: ${archivePath}`);
const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
// Store the cache result
utils.setCacheState(cacheEntry);
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(cacheEntry, archivePath);
await cacheHttpClient.downloadCache(
cacheEntry.archiveLocation,
archivePath
);
io.mkdirP(cachePath);
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const args = ["-xz"];
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
args.push("--force-local");
archivePath = archivePath.replace(/\\/g, "/");
cachePath = cachePath.replace(/\\/g, "/");
}
args.push(...["-f", archivePath, "-C", cachePath]);
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
const archiveFileSize = fs.statSync(archivePath).size;
core.debug(`File Size: ${archiveFileSize}`);
await exec(`"${tarPath}"`, args);
await extractTar(archivePath, cachePath);
const isExactKeyMatch = utils.isExactKeyMatch(
primaryKey,
@@ -92,17 +98,10 @@ async function run() {
utils.setCacheHitOutput(isExactKeyMatch);
core.info(
`Cache restored from key:${cacheEntry && cacheEntry.cacheKey}`
`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`
);
try {
core.info("Cache Checksum:");
await exec(`md5sum`, [`${archivePath}`]);
} catch (error) {
core.debug(`Failed to checkum with ${error}`);
}
} catch (error) {
core.warning(error.message);
utils.logWarning(error.message);
utils.setCacheHitOutput(false);
}
} catch (error) {

src/save.ts
@@ -1,22 +1,29 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as fs from "fs";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Inputs, State } from "./constants";
import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run() {
async function run(): Promise<void> {
try {
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`
);
return;
}
const state = utils.getCacheState();
// Inputs are re-evaluted before the post action, so we want the original key used for restore
const primaryKey = core.getState(State.CacheKey);
if (!primaryKey) {
core.warning(`Error retrieving key from state.`);
utils.logWarning(`Error retrieving key from state.`);
return;
}
@@ -27,54 +34,44 @@
return;
}
let cachePath = utils.resolvePath(
core.debug("Reserving Cache");
const cacheId = await cacheHttpClient.reserveCache(primaryKey);
if (cacheId == -1) {
core.info(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
return;
}
core.debug(`Cache ID: ${cacheId}`);
const cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true })
);
core.debug(`Cache Path: ${cachePath}`);
let archivePath = path.join(
const archivePath = path.join(
await utils.createTempDirectory(),
"cache.tgz"
);
core.debug(`Archive Path: ${archivePath}`);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const args = ["-cz"];
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
args.push("--force-local");
archivePath = archivePath.replace(/\\/g, "/");
cachePath = cachePath.replace(/\\/g, "/");
}
await createTar(archivePath, cachePath);
args.push(...["-f", archivePath, "-C", cachePath, "."]);
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
const fileSizeLimit = 200 * 1024 * 1024; // 200MB
const archiveFileSize = fs.statSync(archivePath).size;
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
core.warning(
`Cache size of ${archiveFileSize} bytes is over the 200MB limit, not saving cache.`
utils.logWarning(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
);
return;
}
const stream = fs.createReadStream(archivePath);
await cacheHttpClient.saveCache(stream, primaryKey);
try {
core.info("Cache Checksum:");
await exec(`md5sum`, [`${archivePath}`]);
} catch (error) {
core.debug(`Failed to checkum with ${error}`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
await cacheHttpClient.saveCache(cacheId, archivePath);
} catch (error) {
core.warning(error.message);
utils.logWarning(error.message);
}
}

src/tar.ts (new file): 47 added lines
@@ -0,0 +1,47 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync } from "fs";
async function getTarPath(): Promise<string> {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (existsSync(systemTar)) {
return systemTar;
}
}
return await io.which("tar", true);
}
async function execTar(args: string[]): Promise<void> {
try {
await exec(`"${await getTarPath()}"`, args);
} catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(
`Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.`
);
}
throw new Error(`Tar failed with error: ${error?.message}`);
}
}
export async function extractTar(
archivePath: string,
targetDirectory: string
): Promise<void> {
// Create directory to extract tar into
await io.mkdirP(targetDirectory);
const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
await execTar(args);
}
export async function createTar(
archivePath: string,
sourceDirectory: string
): Promise<void> {
const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
await execTar(args);
}

src/utils/actionUtils.ts
@@ -1,10 +1,11 @@
import * as core from "@actions/core";
import * as io from "@actions/io";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as uuidV4 from "uuid/v4";
import { Outputs, State } from "../constants";
import { Events, Outputs, State } from "../constants";
import { ArtifactCacheEntry } from "../contracts";
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
@@ -32,6 +33,10 @@ export async function createTempDirectory(): Promise<string> {
return dest;
}
export function getArchiveFileSize(path: string): number {
return fs.statSync(path).size;
}
export function isExactKeyMatch(
key: string,
cacheResult?: ArtifactCacheEntry
@@ -45,10 +50,18 @@ export function isExactKeyMatch(
);
}
export function setCacheState(state: ArtifactCacheEntry): void {
core.saveState(State.CacheResult, JSON.stringify(state));
}
export function setCacheHitOutput(isCacheHit: boolean): void {
core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}
export function setOutputAndState(
key: string,
cacheResult?: ArtifactCacheEntry
) {
): void {
setCacheHitOutput(isExactKeyMatch(key, cacheResult));
// Store the cache result if it exists
cacheResult && setCacheState(cacheResult);
@@ -57,25 +70,38 @@ export function setOutputAndState(
export function getCacheState(): ArtifactCacheEntry | undefined {
const stateData = core.getState(State.CacheResult);
core.debug(`State: ${stateData}`);
return (stateData && JSON.parse(stateData)) as ArtifactCacheEntry;
if (stateData) {
return JSON.parse(stateData) as ArtifactCacheEntry;
}
return undefined;
}
export function setCacheState(state: ArtifactCacheEntry) {
core.saveState(State.CacheResult, JSON.stringify(state));
}
export function setCacheHitOutput(isCacheHit: boolean) {
core.setOutput(Outputs.CacheHit, isCacheHit.toString());
export function logWarning(message: string): void {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
}
export function resolvePath(filePath: string): string {
if (filePath[0] === "~") {
const home = os.homedir();
if (!home) {
throw new Error("Unable to resole `~` to HOME");
throw new Error("Unable to resolve `~` to HOME");
}
return path.join(home, filePath.slice(1));
}
return path.resolve(filePath);
}
export function getSupportedEvents(): string[] {
return [Events.Push, Events.PullRequest];
}
// Currently the cache token is only authorized for push and pull_request events
// All other events will fail when reading and saving the cache
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {
const githubEvent = process.env[Events.Key] || "";
return getSupportedEvents().includes(githubEvent);
}

src/utils/testUtils.ts
@@ -1,7 +1,29 @@
import { Inputs } from "../constants";
// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67
function getInputName(name: string): string {
return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`;
}
export function setInput(name: string, value: string) {
export function setInput(name: string, value: string): void {
process.env[getInputName(name)] = value;
}
interface CacheInput {
path: string;
key: string;
restoreKeys?: string[];
}
export function setInputs(input: CacheInput): void {
setInput(Inputs.Path, input.path);
setInput(Inputs.Key, input.key);
input.restoreKeys &&
setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
}
export function clearInputs(): void {
delete process.env[getInputName(Inputs.Path)];
delete process.env[getInputName(Inputs.Key)];
delete process.env[getInputName(Inputs.RestoreKeys)];
}