Compare commits


67 Commits

SHA1 Message Date
18e62e1fe0 Add env var for socket timeout 2020-05-07 21:35:11 -04:00
ce9276c90e Add CodeQL Analysis workflow (#283)
* Add CodeQL Analysis workflow

* Rename .github/workflows/workflows/codeql.yml to .github/workflows/codeql.yml

* Clean up commented out stuff
2020-05-05 17:28:32 -04:00
9eb452c280 Merge pull request #270 from actions/users/aiyan/zstd
Prefer zstd over gzip
2020-05-04 10:39:28 -04:00
75cd46ec0c Use 30 as the long distance matching window to support both 32-bit and 64-bit OS 2020-05-01 14:25:15 -04:00
a5d9a3b1a6 Address PR feedback 2020-05-01 10:01:43 -04:00
97f7baa910 Use zstd instead of gzip if available
Add zstd to cache versioning
2020-04-30 14:40:17 -04:00
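A hedged sketch of what "prefer zstd over gzip" means in practice: probe for a zstd binary at runtime and fall back to gzip when it is missing. The `CompressionMethod` name matches the constant used throughout the diffs below, but the probe itself is illustrative, not the action's exact code. The `--long=30` window from the follow-up commit caps long-distance matching at 2^30 bytes (1 GiB), the largest window a 32-bit zstd build accepts, so archives stay portable across OS builds.

```ts
import * as exec from "@actions/exec";

enum CompressionMethod {
    Gzip = "gzip",
    Zstd = "zstd"
}

// Probe for zstd on the PATH; exec.exec rejects on a missing binary or
// non-zero exit, which is the cue to fall back to gzip.
async function getCompressionMethod(): Promise<CompressionMethod> {
    try {
        await exec.exec("zstd", ["--version"], { silent: true });
        return CompressionMethod.Zstd;
    } catch {
        return CompressionMethod.Gzip;
    }
}

// tar is then invoked with the matching compressor; the fixed --long=30
// window keeps 32-bit and 64-bit zstd builds compatible with each other.
function compressionArgs(method: CompressionMethod): string[] {
    return method === CompressionMethod.Zstd
        ? ["--use-compress-program", "zstd -T0 --long=30"]
        : ["-z"];
}
```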
9ceee97d99 Bump @actions/http-client from 1.0.6 to 1.0.8 (#286)
Bumps [@actions/http-client](https://github.com/actions/http-client) from 1.0.6 to 1.0.8.
- [Release notes](https://github.com/actions/http-client/releases)
- [Changelog](https://github.com/actions/http-client/blob/master/RELEASES.md)
- [Commits](https://github.com/actions/http-client/commits)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-04-29 18:10:58 -04:00
ccf9619480 Add Python example using 'pip cache dir' to get cache location (#285)
* Fix existing example

* Add Python example using 'pip cache dir' to get cache location

* Let users decide how they install pip 20.1+
2020-04-29 14:58:19 -04:00
9f07ee13de Merge pull request #284 from actions/promisify-pipeline
Better error handling during download
2020-04-29 13:50:12 -05:00
1ed0c23029 Use promisify of stream.pipeline for downloading 2020-04-29 13:24:26 -04:00
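For context on why promisifying `stream.pipeline` is "better error handling": a bare `.pipe()` chain does not propagate source errors to the destination, while `stream.pipeline` forwards any failure in the chain and cleans up both streams. A minimal sketch of the pattern (the helper name is illustrative):

```ts
import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";

const pipeline = util.promisify(stream.pipeline);

// Write an HTTP response body to disk; any error on either side of the
// pipe (socket reset, full disk, ...) rejects the returned promise.
async function pipeResponseToFile(
    responseBody: NodeJS.ReadableStream,
    archivePath: string
): Promise<void> {
    await pipeline(responseBody, fs.createWriteStream(archivePath));
}
```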
54626c4a4f Merge pull request #269 from actions/socket-timeout
Adds socket timeout and validate file size
2020-04-29 12:21:27 -05:00
48b62c1c52 Add comment for SocketTimeout 2020-04-28 21:31:41 -04:00
9bb13c71ec Fix lint issue, build .js files 2020-04-22 18:35:16 -04:00
8b2a57849f Adds socket timeout and validate file size 2020-04-22 18:23:41 -04:00
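A sketch of the two guards this commit describes, as applied to a download. The 5-second default and the `SOCKET_TIMEOUT` env override come from commits in this list; the surrounding shape is an assumption for illustration.

```ts
import * as fs from "fs";
import * as http from "http";

// Assumed 5 s default, overridable via SOCKET_TIMEOUT (see the
// "Add env var for socket timeout" commit at the top of this list).
const SocketTimeout = Number(process.env["SOCKET_TIMEOUT"]) || 5000;

// Guard 1: destroy the response if the socket goes idle mid-download,
// instead of letting a dropped connection hang the job indefinitely.
function applySocketTimeout(response: http.IncomingMessage): void {
    response.socket.setTimeout(SocketTimeout, () => response.destroy());
}

// Guard 2: after writing the archive, confirm the bytes on disk match the
// server-advertised Content-Length; a short read means a truncated cache.
function validateFileSize(
    response: http.IncomingMessage,
    archivePath: string
): void {
    const expected = Number(response.headers["content-length"]);
    const actual = fs.statSync(archivePath).size;
    if (!Number.isNaN(expected) && actual !== expected) {
        throw new Error(
            `Incomplete download. Expected ${expected} bytes, received ${actual}`
        );
    }
}
```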
f00dedfa6c Use checkout@v2 in README example (#258) 2020-04-16 11:50:47 -04:00
12b87469d4 Merge pull request #252 from actions/users/aiyan/fallback-to-gnu-tar
Fallback to GNU tar if BSD tar is unavailable on windows machine
2020-04-13 13:32:01 -04:00
52046d1409 Use path.sep in path replace 2020-04-13 12:20:27 -04:00
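That one-liner: when normalizing native paths for tar, split on `path.sep` rather than assuming a backslash, so the same rewrite is correct on every platform. A minimal illustration, not the exact call used:

```ts
import * as path from "path";

// "C:\\Users\\me\\cache" -> "C:/Users/me/cache" on Windows;
// a no-op on POSIX, where path.sep is already "/".
function toTarPath(p: string): string {
    return p.split(path.sep).join("/");
}
```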
08438313d5 Fix macOs-latest test 2020-04-10 15:50:35 -04:00
7ccdf5c70d Rebase and rebuild 2020-04-10 15:34:34 -04:00
306f72536b Fix test 2020-04-10 15:33:43 -04:00
4fa017f2b7 Fallback to GNU tar if BSD tar is unavailable 2020-04-10 15:33:43 -04:00
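The fallback needs to know which tar it found, because GNU tar on Windows wants `--force-local` for paths containing drive-letter colons while BSD tar does not. A plausible sketch of the detection, matching the `useGnuTar` helper mocked in the tar tests further down; the version probe is an assumption:

```ts
import * as exec from "@actions/exec";

// GNU tar announces itself in `tar --version`; BSD tar (bsdtar) does not.
export async function useGnuTar(): Promise<boolean> {
    let versionOutput = "";
    await exec.exec("tar", ["--version"], {
        ignoreReturnCode: true,
        silent: true,
        listeners: {
            stdout: (data: Buffer): void => {
                versionOutput += data.toString();
            }
        }
    });
    return versionOutput.toUpperCase().includes("GNU TAR");
}
```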
78809b91d7 Merge pull request #250 from actions/test-relative-path
Fix caching directories outside of the working directory (relative paths)
2020-04-08 10:37:26 -05:00
a4e3c3b64e Add -P flag for tar creation 2020-04-08 10:58:38 -04:00
e5370355e6 Combine relative jobs into main test jobs 2020-04-08 10:52:52 -04:00
0e86d5c038 Update workflow.yml 2020-04-07 23:41:38 -04:00
2ba9edf492 Fix job names v2 2020-04-07 23:37:50 -04:00
f15bc7a0d9 Fix job names 2020-04-07 23:33:13 -04:00
b6b8aa78d8 Update workflow.yml 2020-04-07 23:31:27 -04:00
272268544c Add path argument to verify-cache-files.sh 2020-04-07 23:30:01 -04:00
64f8769515 Add path argument to create-cache-files.sh 2020-04-07 23:29:07 -04:00
4a724707e9 Add test for relative paths 2020-04-07 23:28:05 -04:00
f60097cd16 Fix Lerna Example (#242)
* Fix lerna example

* Fix yaml spacing
2020-04-02 10:35:07 -04:00
eb78578266 Cache multiple paths and add glob pattern support (#212)
* Allow for multiple line-delimited paths to cache

* Add initial minimatch support

* Use @actions/glob for pattern matching

* Cache multiple entries using --files-from tar input

remove known failing test

Quote tar paths

Add salt to test cache

Try reading input files from manifest

bump salt

Run test on macos

more testing

Run caching tests on 3 platforms

Run tests on self-hosted

Apparently can't reference hosted runners by name

Bump salt

wait for some time after save

more timing out

smarter waiting

Cache in tmp dir that won't be deleted

Use child_process instead of actions/exec

Revert tempDir hack

bump salt

more logging

More console logging

Use filepath to with cacheHttpClient

Test cache restoration

Revert temp dir hack

debug logging

clean up cache.yml testing

Bump salt

change debug output

build actions

* unit test coverage for caching multiple dirs

* Ensure there's a locatable test folder at homedir

* Clean up code

* Version cache with all inputs

* Unit test getCacheVersion

* Include keys in getCacheEntry request

* Clean import orders

* Use fs promises in actionUtils tests

* Update import order for to fix linter errors

* Fix remaining linter error

* Remove platform-specific test code

* Add lerna example for caching multiple dirs

* Lerna example updated to v2

Co-Authored-By: Josh Gross <joshmgross@github.com>

Co-authored-by: Josh Gross <joshmgross@github.com>
2020-03-20 16:02:11 -04:00
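The core of #212: the `path` input becomes newline-delimited glob patterns, expanded via `@actions/glob` into workspace-relative entries that are handed to tar through `--files-from`. A condensed sketch of the resolution step; it mirrors the `resolvePaths` helper exercised by the tests below, but the details are simplified assumptions:

```ts
import * as glob from "@actions/glob";
import * as path from "path";

// Expand patterns such as ["node_modules", "dist", "**/*.lock"] into
// paths relative to the workspace, the form stored in the tarball.
export async function resolvePaths(patterns: string[]): Promise<string[]> {
    const workspace = process.env["GITHUB_WORKSPACE"] || process.cwd();
    const globber = await glob.create(patterns.join("\n"));
    const files = await globber.glob();
    return files.map(file => path.relative(workspace, file));
}
```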
22d71e33ad Update Node Windows example to find the npm cache (#223) 2020-03-18 22:05:56 -04:00
b13df3fa54 Update README.md (#213) 2020-03-18 09:44:24 -04:00
cae64ca3cd Attempt to delete the archive after extraction (#209)
This reduces storage space used once the Action has finished executing.
2020-03-18 09:43:56 -04:00
af8651e0c5 Include Kotlinscript Gradle files (#216)
Tested this with my own repo which uses a mix of `build.gradle` and `build.gradle.kts` files and this glob seems to be working correctly.

As an aside, please check out #215 as it would make the process of verifying these globs easier!
2020-03-18 09:40:55 -04:00
6c471ae9f6 Add eslint-plugin-simple-import-sort (#219)
* Add eslint-plugin-simple-import-sort

* Update .eslintrc.json

* eslint --fix
2020-03-18 09:35:13 -04:00
206172ea8e npm audit fix (#221) 2020-03-18 09:31:59 -04:00
5833d5c131 Bump acorn from 5.7.3 to 5.7.4 (#214)
Bumps [acorn](https://github.com/acornjs/acorn) from 5.7.3 to 5.7.4.
- [Release notes](https://github.com/acornjs/acorn/releases)
- [Commits](https://github.com/acornjs/acorn/compare/5.7.3...5.7.4)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-03-17 16:12:29 -04:00
826785142a Adding examples for OCaml/esy (#199)
* Adding examples for esy as a workflow for OCaml files

* track v1 instead of v1.1.2

Co-Authored-By: Josh Gross <joshmgross@github.com>

* add link in the readme for ocaml-esy

* ocaml -> ocaml/reason

* link in readme says ocaml/reason

Co-authored-by: Josh Gross <joshmgross@github.com>
2020-02-26 17:43:11 -05:00
8e9c167fd7 Small message change (#195)
* Small message change

Remove the dot that generates confusion about whether it's part of the key or not

* Fix format-check

* Update tests
2020-02-25 14:16:36 -05:00
e8230b28a9 Use different IDs for 1) getting the directory of yarn cache 2) the cache itself (#178)
* Use different IDs for 1) getting the directory of yarn cache 2) the cache itself

Using the current example + https://github.com/actions/cache#skipping-steps-based-on-cache-hit,

I came to the wrong conclusion that I could skip a step
if the `cache-hit` was `true` -
the ID I used was from the wrong step -
the `get yarn cache directory` step,
instead of the `get yarn cache itself` step.

I've updated the example in hopes that it'll be clearer for others as well!

Signed-off-by: Kipras Melnikovas <kipras@kipras.org>

* Explain which ID to use for `cache-hit` in yarn's example

Signed-off-by: Kipras Melnikovas <kipras@kipras.org>
2020-02-14 09:50:11 -05:00
4944275b95 test e2e during workflow (#185) 2020-02-13 12:38:56 -05:00
78a4b2143b Bump version to 1.1.2 2020-02-05 10:40:53 -05:00
4dc4b4e758 Change name back to Cache 2020-02-05 10:39:52 -05:00
85aee6a487 Update docs with 5GB limit 2020-02-05 10:33:21 -05:00
fab26f3f4f Bump version to 1.1.1 2020-02-05 09:55:35 -05:00
4887979af8 proxy support (#166)
* Replace typed rest client with new http-client

* Send Content-Type: application/json and fix up some types

* Lint

* Consume @actions/http-client:1.0.5

* Consume @actions/http-client:1.0.6

* Don't send headers manually, http-client automatically will
2020-02-05 09:24:37 -05:00
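The proxy support comes "for free" from the client swap: `@actions/http-client` reads the standard `https_proxy`/`no_proxy` environment variables itself, which the squid-proxy workflow jobs shown later in this diff exercise end to end. Minimal usage, with the auth handler and URL as illustrative placeholders:

```ts
import { HttpClient } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";

// No explicit proxy wiring needed: the client picks up https_proxy and
// no_proxy from the environment on its own.
const client = new HttpClient("actions/cache", [
    new BearerCredentialHandler(process.env["ACTIONS_RUNTIME_TOKEN"] || "")
]);

async function getCacheEntry(url: string): Promise<string> {
    const response = await client.get(url);
    return response.readBody();
}
```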
f9c9166ecb Increase cache limit to 5 GBs (#168)
* Increase cache limit to 5 GBs

* Fix test to use new size limit

* Update src/save.ts

Co-Authored-By: Josh Gross <joshmgross@github.com>

Co-authored-by: Josh Gross <joshmgross@github.com>
2020-02-01 16:11:02 -05:00
23e301d35c Disable fail-fast to get full coverage of failures 2020-01-29 20:34:56 -05:00
e43776276f Add Swift Package Manager (SPM) example (#159)
* Add Swift - SPM to examples

* Add link SPM example link to readme

* remove extra newline

* remove another extra newline
2020-01-29 11:13:59 -05:00
b6d538e2aa Add renv examples (#151)
* Add renv examples

* Add link in main readme.md
2020-01-21 19:22:40 -05:00
296374f6c9 Update action's description (#75)
* README: clarify case on the action

* Update description
2020-01-14 10:11:41 -05:00
6c11532937 Update Ruby docs. "Gem" -> "Bundler" (#150)
* Use "Bundler" which is the package manager

"Gem" isn't wrong, but not typically what a Ruby developer would think of.

* Update links

* Update links
2020-01-12 18:48:43 -05:00
c33bff8d72 Add Scala - SBT example (#134)
* Add Scala - SBT example

* Add Scala - SBT example to README
2020-01-10 17:09:06 -05:00
d1991bb4c5 Add Haskell - Cabal example (#148)
* Add Haskell - Cabal example

* Add link in main readme.md
2020-01-10 17:07:52 -05:00
60e292adf7 Update cache limits (#140) 2020-01-07 15:01:47 -05:00
c262ac0154 Fix number parsing issues 2020-01-06 14:06:24 -05:00
1da52de10f npm audit fix 2020-01-06 13:31:03 -05:00
b45d91cc4b Chunked Cache Upload APIs (#128)
* Initial pass at chunked upload apis

* Fix cacheEntry type

* Linting

* Fix download cache entry tests

* Linting tests

* Pull in fixes from testing branch

* Fix typo in ReserveCacheResponse

* Add test covering reserve cache failure

* Add retries to upload chunk

* PR feedback

* Format default chunk size

* Remove responses array
2020-01-06 13:05:50 -05:00
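The upload flow this PR introduces, in outline: reserve a cache ID for the key, stream the archive up in fixed-size chunks carrying `Content-Range` headers (each chunk retried on transient failure), then commit the final size. A schematic sketch; the chunk size and the two network helpers are assumptions for illustration, declared rather than implemented:

```ts
import * as fs from "fs";

// Assumed chunk size, for illustration only.
const ChunkSize = 32 * 1024 * 1024;

// Sketched network calls: a PATCH with "Content-Range: bytes start-end/*"
// (retried on failure) and a final POST of the total size to commit.
declare function uploadChunkWithRetry(
    cacheId: number,
    chunk: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<void>;
declare function commitCache(cacheId: number, size: number): Promise<void>;

async function uploadArchive(
    cacheId: number,
    archivePath: string
): Promise<void> {
    const fileSize = fs.statSync(archivePath).size;
    // Upload [offset, end] windows until the whole file has been sent.
    for (let offset = 0; offset < fileSize; offset += ChunkSize) {
        const end = Math.min(offset + ChunkSize, fileSize) - 1;
        const chunk = fs.createReadStream(archivePath, {
            start: offset,
            end
        });
        await uploadChunkWithRetry(cacheId, chunk, offset, end);
    }
    await commitCache(cacheId, fileSize);
}
```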
a631fadf14 README.md: fix grammar error (#136)
"it's" is short for "it is," but the use in this sentence is as a
possessive - something belonging to "it" - hence, "its" is correct.
2019-12-23 10:30:34 -05:00
e223b0a12d Merge pull request #124 from nogic1008/patch-1
Add Another C# Example to use personal cache folder
2019-12-16 10:24:25 -05:00
decbafc350 Update examples.md
Co-Authored-By: Chris Patterson <chrispat@github.com>
2019-12-16 09:45:29 +09:00
3854a40aee Use BSD tar on windows (#126)
* Use BSD tar on windows

* Linting

* Fallback to which tar if no system tar

* Fix formatting

* Bump prettier and typescript
2019-12-13 17:24:37 -05:00
0188dffc5a Revert original C# Example
* Treat "Use Personal Cache Folder" way as another C# example
* Describe the situation in which another example should be used
2019-12-13 10:03:43 +09:00
002d3a77f4 Use Personal Cache Folder in C# Example
Ref: #115
2019-12-10 09:21:47 +09:00
25 changed files with 11026 additions and 6937 deletions

.eslintrc.json

@@ -12,5 +12,12 @@
     "plugin:prettier/recommended",
     "prettier/@typescript-eslint"
   ],
-  "plugins": ["@typescript-eslint", "jest"]
+  "plugins": ["@typescript-eslint", "simple-import-sort", "jest"],
+  "rules": {
+    "import/first": "error",
+    "import/newline-after-import": "error",
+    "import/no-duplicates": "error",
+    "simple-import-sort/sort": "error",
+    "sort-imports": "off"
+  }
 }
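What the new rules enforce, in miniature: external packages sort ahead of relative imports, groups are alphabetized and separated by a blank line, and `eslint --fix` rewrites files automatically, which is why so many of the test diffs below consist largely of reshuffled import lines. An illustrative example:

```ts
// Flagged by simple-import-sort/sort (relative import listed first):
//
//     import * as testUtils from "../src/utils/testUtils";
//     import * as core from "@actions/core";
//
// After `eslint --fix`: package imports first, then relative paths,
// with a blank line between the groups.
import * as core from "@actions/core";

import * as testUtils from "../src/utils/testUtils";
```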

.github/workflows/codeql.yml (new file, 35 lines)

name: "Code Scanning - Action"

on:
  push:
  schedule:
    - cron: '0 0 * * 0'

jobs:
  CodeQL-Build:
    strategy:
      fail-fast: false
    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v2

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        # Override language selection by uncommenting this and choosing your languages
        # with:
        #   languages: go, javascript, csharp, python, cpp, java

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below).
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1

.github/workflows/workflow.yml

@@ -13,41 +13,129 @@ on:
     - '**.md'

 jobs:
-  test:
-    name: Test on ${{ matrix.os }}
+  # Build and unit test
+  build:
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false
     runs-on: ${{ matrix.os }}
     steps:
-    - uses: actions/checkout@v1
+    - name: Checkout
+      uses: actions/checkout@v2
-    - uses: actions/setup-node@v1
+    - name: Setup Node.js
+      uses: actions/setup-node@v1
       with:
         node-version: '12.x'
-    - name: Determine npm cache directory
+    - name: Get npm cache directory
       id: npm-cache
       run: |
         echo "::set-output name=dir::$(npm config get cache)"
-    - uses: actions/cache@v1
+    - name: Restore npm cache
+      uses: actions/cache@v1
       with:
         path: ${{ steps.npm-cache.outputs.dir }}
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
         restore-keys: |
           ${{ runner.os }}-node-
     - run: npm ci
     - name: Prettier Format Check
       run: npm run format-check
     - name: ESLint Check
       run: npm run lint
     - name: Build & Test
       run: npm run test
+
+  # End to end save and restore
+  test-save:
+    strategy:
+      matrix:
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Generate files in working directory
+      shell: bash
+      run: __tests__/create-cache-files.sh ${{ runner.os }} test-cache
+    - name: Generate files outside working directory
+      shell: bash
+      run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
+    - name: Save cache
+      uses: ./
+      with:
+        key: test-${{ runner.os }}-${{ github.run_id }}
+        path: |
+          test-cache
+          ~/test-cache
+
+  test-restore:
+    needs: test-save
+    strategy:
+      matrix:
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Restore cache
+      uses: ./
+      with:
+        key: test-${{ runner.os }}-${{ github.run_id }}
+        path: |
+          test-cache
+          ~/test-cache
+    - name: Verify cache files in working directory
+      shell: bash
+      run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache
+    - name: Verify cache files outside working directory
+      shell: bash
+      run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
+
+  # End to end with proxy
+  test-proxy-save:
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:latest
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+        - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Generate files
+      run: __tests__/create-cache-files.sh proxy test-cache
+    - name: Save cache
+      uses: ./
+      with:
+        key: test-proxy-${{ github.run_id }}
+        path: test-cache
+
+  test-proxy-restore:
+    needs: test-proxy-save
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:latest
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+        - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Restore cache
+      uses: ./
+      with:
+        key: test-proxy-${{ github.run_id }}
+        path: test-cache
+    - name: Verify cache
+      run: __tests__/verify-cache-files.sh proxy test-cache

.gitignore (vendored)

@@ -1,8 +1,5 @@
 __tests__/runner/*
-
-# comment out in distribution branches
-dist/
 node_modules/
 lib/

@@ -94,3 +91,6 @@ typings/
 # DynamoDB Local files
 .dynamodb/
+
+# Text editor files
+.vscode/

README.md

@@ -1,6 +1,6 @@
 # cache

-This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
+This action allows caching dependencies and build outputs to improve workflow execution time.

 <a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>

@@ -37,7 +37,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v1
+    - uses: actions/checkout@v2

     - name: Cache Primes
       id: cache-primes

@@ -56,28 +56,33 @@ jobs:
 ## Implementation Examples

-Every programming language and framework has it's own way of caching.
+Every programming language and framework has its own way of caching.

 See [Examples](examples.md) for a list of `actions/cache` implementations for use with:

 - [C# - Nuget](./examples.md#c---nuget)
 - [Elixir - Mix](./examples.md#elixir---mix)
 - [Go - Modules](./examples.md#go---modules)
+- [Haskell - Cabal](./examples.md#haskell---cabal)
 - [Java - Gradle](./examples.md#java---gradle)
 - [Java - Maven](./examples.md#java---maven)
 - [Node - npm](./examples.md#node---npm)
+- [Node - Lerna](./examples.md#node---lerna)
 - [Node - Yarn](./examples.md#node---yarn)
+- [OCaml/Reason - esy](./examples.md##ocamlreason---esy)
 - [PHP - Composer](./examples.md#php---composer)
 - [Python - pip](./examples.md#python---pip)
-- [Ruby - Gem](./examples.md#ruby---gem)
+- [R - renv](./examples.md#r---renv)
+- [Ruby - Bundler](./examples.md#ruby---bundler)
 - [Rust - Cargo](./examples.md#rust---cargo)
+- [Scala - SBT](./examples.md#scala---sbt)
 - [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
 - [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
+- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)

 ## Cache Limits

-Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
+A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.

 ## Skipping steps based on cache-hit

@@ -86,7 +91,7 @@ Using the `cache-hit` output, subsequent steps (such as install or build) can be
 Example:

 ```yaml
 steps:
-  - uses: actions/checkout@v1
+  - uses: actions/checkout@v2

   - uses: actions/cache@v1
     id: cache

__tests__/actionUtils.test.ts

@@ -1,4 +1,6 @@
 import * as core from "@actions/core";
+import * as io from "@actions/io";
+import { promises as fs } from "fs";
 import * as os from "os";
 import * as path from "path";

@@ -6,13 +8,24 @@ import { Events, Outputs, State } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import * as actionUtils from "../src/utils/actionUtils";

+import uuid = require("uuid");
+
 jest.mock("@actions/core");
 jest.mock("os");

+function getTempDir(): string {
+    return path.join(__dirname, "_temp", "actionUtils");
+}
+
 afterEach(() => {
     delete process.env[Events.Key];
 });

+afterAll(async () => {
+    delete process.env["GITHUB_WORKSPACE"];
+    await io.rmRF(getTempDir());
+});
+
 test("getArchiveFileSize returns file size", () => {
     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");

@@ -181,17 +194,43 @@ test("isValidEvent returns false for unknown event", () => {
     expect(isValidEvent).toBe(false);
 });

-test("resolvePath with no ~ in path", () => {
-    const filePath = ".cache/yarn";
-    const resolvedPath = actionUtils.resolvePath(filePath);
-
-    const expectedPath = path.resolve(filePath);
-    expect(resolvedPath).toBe(expectedPath);
+test("resolvePaths with no ~ in path", async () => {
+    const filePath = ".cache";
+
+    // Create the following layout:
+    //   cwd
+    //   cwd/.cache
+    //   cwd/.cache/file.txt
+    const root = path.join(getTempDir(), "no-tilde");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    const cache = path.join(root, ".cache");
+    await fs.mkdir(cache, { recursive: true });
+    await fs.writeFile(path.join(cache, "file.txt"), "cached");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [filePath];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
 });

-test("resolvePath with ~ in path", () => {
-    const filePath = "~/.cache/yarn";
+test("resolvePaths with ~ in path", async () => {
+    const cacheDir = uuid();
+    const filePath = `~/${cacheDir}`;
+    // Create the following layout:
+    //   ~/uuid
+    //   ~/uuid/file.txt

     const homedir = jest.requireActual("os").homedir();
     const homedirMock = jest.spyOn(os, "homedir");
@@ -199,24 +238,93 @@ test("resolvePath with ~ in path", () => {
         return homedir;
     });

-    const resolvedPath = actionUtils.resolvePath(filePath);
-
-    const expectedPath = path.join(homedir, ".cache/yarn");
-    expect(resolvedPath).toBe(expectedPath);
+    const target = path.join(homedir, cacheDir);
+    await fs.mkdir(target, { recursive: true });
+    await fs.writeFile(path.join(target, "file.txt"), "cached");
+
+    const root = getTempDir();
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    try {
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [path.relative(root, target)];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        await io.rmRF(target);
+    }
 });

-test("resolvePath with home not found", () => {
+test("resolvePaths with home not found", async () => {
     const filePath = "~/.cache/yarn";
     const homedirMock = jest.spyOn(os, "homedir");
     homedirMock.mockImplementation(() => {
         return "";
     });

-    expect(() => actionUtils.resolvePath(filePath)).toThrow(
-        "Unable to resolve `~` to HOME"
+    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
+        "Unable to determine HOME directory"
     );
 });

+test("resolvePaths inclusion pattern returns found", async () => {
+    const pattern = "*.ts";
+    // Create the following layout:
+    //   inclusion-patterns
+    //   inclusion-patterns/miss.txt
+    //   inclusion-patterns/test.ts
+    const root = path.join(getTempDir(), "inclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([pattern]);
+
+        const expectedPath = ["test.ts"];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
+test("resolvePaths exclusion pattern returns not found", async () => {
+    const patterns = ["*.ts", "!test.ts"];
+    // Create the following layout:
+    //   exclusion-patterns
+    //   exclusion-patterns/miss.txt
+    //   exclusion-patterns/test.ts
+    const root = path.join(getTempDir(), "exclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "no match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths(patterns);
+
+        const expectedPath = [];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
 test("isValidEvent returns true for push event", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
@@ -234,3 +342,16 @@ test("isValidEvent returns true for pull request event", () => {
     expect(isValidEvent).toBe(true);
 });
+
+test("unlinkFile unlinks file", async () => {
+    const testDirectory = await fs.mkdtemp("unlinkFileTest");
+    const testFile = path.join(testDirectory, "test.txt");
+    await fs.writeFile(testFile, "hello world");
+
+    await actionUtils.unlinkFile(testFile);
+
+    // This should throw as testFile should not exist
+    await expect(fs.stat(testFile)).rejects.toThrow();
+
+    await fs.rmdir(testDirectory);
+});

__tests__/cacheHttpClient.test.ts (new file, 39 lines)

import { getCacheVersion } from "../src/cacheHttpClient";
import { CompressionMethod, Inputs } from "../src/constants";
import * as testUtils from "../src/utils/testUtils";

afterEach(() => {
    testUtils.clearInputs();
});

test("getCacheVersion with path input and compression method undefined returns version", async () => {
    testUtils.setInput(Inputs.Path, "node_modules");

    const result = getCacheVersion();

    expect(result).toEqual(
        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
    );
});

test("getCacheVersion with zstd compression returns version", async () => {
    testUtils.setInput(Inputs.Path, "node_modules");

    const result = getCacheVersion(CompressionMethod.Zstd);

    expect(result).toEqual(
        "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
    );
});

test("getCacheVersion with gzip compression does not change version", async () => {
    testUtils.setInput(Inputs.Path, "node_modules");

    const result = getCacheVersion(CompressionMethod.Gzip);

    expect(result).toEqual(
        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
    );
});

test("getCacheVersion with no input throws", async () => {
    expect(() => getCacheVersion()).toThrow();
});
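Those hex literals are SHA-256 digests: the cache "version" hashes every input that affects archive compatibility, i.e. the `path` input plus the compression method when it differs from the gzip default, so a zstd archive is never offered to a restore that can only handle gzip. A sketch consistent with the tests above; the exact component layout is an assumption:

```ts
import * as crypto from "crypto";

type CompressionMethod = "gzip" | "zstd";

// Hash every compatibility-relevant input; gzip is the legacy default and
// is deliberately omitted so existing gzip caches keep the same version.
function getCacheVersion(
    pathInput: string,
    compressionMethod?: CompressionMethod
): string {
    const components = [pathInput].concat(
        compressionMethod === "zstd" ? [compressionMethod] : []
    );
    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}
```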

__tests__/create-cache-files.sh (new executable file, 17 lines)

#!/bin/sh

# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
    echo "Must supply prefix argument"
    exit 1
fi

path="$2"
if [ -z "$path" ]; then
    echo "Must supply path argument"
    exit 1
fi

mkdir -p $path
echo "$prefix $GITHUB_RUN_ID" > $path/test-file.txt

__tests__/restore.test.ts

@@ -1,24 +1,24 @@
 import * as core from "@actions/core";
-import * as exec from "@actions/exec";
-import * as io from "@actions/io";
 import * as path from "path";
+
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { Events, Inputs } from "../src/constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Inputs
+} from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";
+import * as tar from "../src/tar";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

-jest.mock("@actions/exec");
-jest.mock("@actions/io");
-jest.mock("../src/utils/actionUtils");
 jest.mock("../src/cacheHttpClient");
+jest.mock("../src/tar");
+jest.mock("../src/utils/actionUtils");

 beforeAll(() => {
-    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
-        return path.resolve(filePath);
-    });
-
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -36,8 +36,9 @@ beforeAll(() => {
         return actualUtils.getSupportedEvents();
     });

-    jest.spyOn(io, "which").mockImplementation(tool => {
-        return Promise.resolve(tool);
+    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getCacheFileName(cm);
     });
 });

@@ -65,7 +66,8 @@ test("restore with invalid event outputs warning", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     await run();
-    expect(failedMock).toHaveBeenCalledWith(
+    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
+    expect(failedMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
 });

@@ -142,7 +144,7 @@ test("restore with no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);

     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}.`
+        `Cache not found for input keys: ${key}`
     );
 });

@@ -201,13 +203,12 @@ test("restore with restore keys and no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);

     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}, ${restoreKey}.`
+        `Cache not found for input keys: ${key}, ${restoreKey}`
     );
 });

-test("restore with cache found", async () => {
+test("restore with gzip compressed cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key

@@ -236,7 +237,7 @@ test("restore with cache found", async () => {
         return Promise.resolve(tempPath);
     });

-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Gzip);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");

@@ -245,14 +246,21 @@ test("restore with cache found", async () => {
         .spyOn(actionUtils, "getArchiveFileSize")
         .mockReturnValue(fileSize);

-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
+    const extractTarMock = jest.spyOn(tar, "extractTar");
+    const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const compression = CompressionMethod.Gzip;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));

     await run();

     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key]);
+    expect(getCacheMock).toHaveBeenCalledWith([key], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(

@@ -260,33 +268,23 @@ test("restore with cache found", async () => {
         archivePath
     );
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
-    expect(mkdirMock).toHaveBeenCalledWith(cachePath);

-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-xz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/")
-          ]
-        : ["-xz", "-f", archivePath, "-C", cachePath];
+    expect(extractTarMock).toHaveBeenCalledTimes(1);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);

-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(unlinkFileMock).toHaveBeenCalledTimes(1);
+    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);

     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);

     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

-test("restore with a pull request event and cache found", async () => {
+test("restore with a pull request event and zstd compressed cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key

@@ -317,7 +315,7 @@ test("restore with a pull request event and cache found", async () => {
         return Promise.resolve(tempPath);
     });

-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Zstd);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");

@@ -326,14 +324,19 @@ test("restore with a pull request event and cache found", async () => {
         .spyOn(actionUtils, "getArchiveFileSize")
         .mockReturnValue(fileSize);

-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
+    const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));

     await run();

     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key]);
+    expect(getCacheMock).toHaveBeenCalledWith([key], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(

@@ -342,34 +345,21 @@ test("restore with a pull request event and cache found", async () => {
     );
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
-    expect(mkdirMock).toHaveBeenCalledWith(cachePath);

-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-xz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/")
-          ]
-        : ["-xz", "-f", archivePath, "-C", cachePath];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(extractTarMock).toHaveBeenCalledTimes(1);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);

     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);

     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

 test("restore with cache found for restore key", async () => {
     const key = "node-test";
     const restoreKey = "node-";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key,

@@ -399,7 +389,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(tempPath);
     });

-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Zstd);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");

@@ -408,14 +398,19 @@ test("restore with cache found for restore key", async () => {
         .spyOn(actionUtils, "getArchiveFileSize")
         .mockReturnValue(fileSize);

-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
+    const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));

     await run();

     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
+    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(

@@ -424,22 +419,9 @@ test("restore with cache found for restore key", async () => {
     );
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
-    expect(mkdirMock).toHaveBeenCalledWith(cachePath);

-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-xz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/")
-          ]
-        : ["-xz", "-f", archivePath, "-C", cachePath];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(extractTarMock).toHaveBeenCalledTimes(1);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);

     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);

@@ -448,4 +430,5 @@ test("restore with cache found for restore key", async () => {
         `Cache restored from key: ${restoreKey}`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

__tests__/save.test.ts

@@ -1,19 +1,23 @@
 import * as core from "@actions/core";
-import * as exec from "@actions/exec";
-import * as io from "@actions/io";
 import * as path from "path";
+
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { Events, Inputs } from "../src/constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Inputs
+} from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
+import * as tar from "../src/tar";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

 jest.mock("@actions/core");
-jest.mock("@actions/exec");
-jest.mock("@actions/io");
-jest.mock("../src/utils/actionUtils");
 jest.mock("../src/cacheHttpClient");
+jest.mock("../src/tar");
+jest.mock("../src/utils/actionUtils");

 beforeAll(() => {
     jest.spyOn(core, "getInput").mockImplementation((name, options) => {

@@ -42,16 +46,19 @@ beforeAll(() => {
         return actualUtils.getSupportedEvents();
     });

-    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
-        return path.resolve(filePath);
-    });
+    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
+        async filePaths => {
+            return filePaths.map(x => path.resolve(x));
+        }
+    );

     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
         return Promise.resolve("/foo/bar");
     });

-    jest.spyOn(io, "which").mockImplementation(tool => {
-        return Promise.resolve(tool);
+    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getCacheFileName(cm);
     });
 });

@@ -128,7 +135,7 @@ test("save with exact match returns early", async () => {
         return primaryKey;
     });

-    const execMock = jest.spyOn(exec, "exec");
+    const createTarMock = jest.spyOn(tar, "createTar");

     await run();

@@ -136,7 +143,7 @@ test("save with exact match returns early", async () => {
         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
     );

-    expect(execMock).toHaveBeenCalledTimes(0);
+    expect(createTarMock).toHaveBeenCalledTimes(0);

     expect(failedMock).toHaveBeenCalledTimes(0);
 });

@@ -195,42 +202,93 @@ test("save with large cache outputs warning", async () => {
     });

     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);

-    const execMock = jest.spyOn(exec, "exec");
+    const createTarMock = jest.spyOn(tar, "createTar");

-    const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit
+    const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
         return cacheSize;
     });
+    const compression = CompressionMethod.Gzip;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));

     await run();

-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";

-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-cz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/"),
-              "."
-          ]
-        : ["-cz", "-f", archivePath, "-C", cachePath, "."];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(createTarMock).toHaveBeenCalledTimes(1);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
-        "Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache."
+        "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
     );

     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
+});
+
+test("save with reserve cache failure outputs warning", async () => {
+    const infoMock = jest.spyOn(core, "info");
+    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
+    const failedMock = jest.spyOn(core, "setFailed");
+
+    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "Linux-node-",
+        scope: "refs/heads/master",
+        creationTime: "2019-11-13T19:18:02+00:00",
+        archiveLocation: "www.actionscache.test/download"
+    };
+
+    jest.spyOn(core, "getState")
+        // Cache Entry State
+        .mockImplementationOnce(() => {
+            return JSON.stringify(cacheEntry);
+        })
+        // Cache Key State
+        .mockImplementationOnce(() => {
+            return primaryKey;
+        });
+
+    const inputPath = "node_modules";
+    testUtils.setInput(Inputs.Path, inputPath);
+
+    const reserveCacheMock = jest
+        .spyOn(cacheHttpClient, "reserveCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(-1);
+        });
+
+    const createTarMock = jest.spyOn(tar, "createTar");
+    const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
+
+    await run();
+
+    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });
+
+    expect(infoMock).toHaveBeenCalledWith(
+        `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
+    );
+
+    expect(createTarMock).toHaveBeenCalledTimes(0);
+    expect(saveCacheMock).toHaveBeenCalledTimes(0);
+    expect(logWarningMock).toHaveBeenCalledTimes(0);
+    expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

 test("save with server error outputs warning", async () => {
@@ -256,52 +314,53 @@ test("save with server error outputs warning", async () => {
     });

     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);

     const cacheId = 4;
-    const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => {
-        return Promise.resolve(cacheId);
-    });
+    const reserveCacheMock = jest
+        .spyOn(cacheHttpClient, "reserveCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(cacheId);
+        });

-    const execMock = jest.spyOn(exec, "exec");
+    const createTarMock = jest.spyOn(tar, "createTar");

     const saveCacheMock = jest
         .spyOn(cacheHttpClient, "saveCache")
         .mockImplementationOnce(() => {
             throw new Error("HTTP Error Occurred");
         });
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));

     await run();

     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });

-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);

-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-cz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/"),
-              "."
-          ]
-        : ["-cz", "-f", archivePath, "-C", cachePath, "."];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(createTarMock).toHaveBeenCalledTimes(1);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);

     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");

     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

 test("save with valid inputs uploads a cache", async () => {
@@ -326,43 +385,44 @@ test("save with valid inputs uploads a cache", async () => {
     });

     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);

     const cacheId = 4;
-    const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => {
-        return Promise.resolve(cacheId);
-    });
+    const reserveCacheMock = jest
+        .spyOn(cacheHttpClient, "reserveCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(cacheId);
+        });

-    const execMock = jest.spyOn(exec, "exec");
+    const createTarMock = jest.spyOn(tar, "createTar");

     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));

     await run();

     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });

-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);

-    const IS_WINDOWS = process.platform === "win32";
-    const args = IS_WINDOWS
-        ? [
-              "-cz",
-              "--force-local",
-              "-f",
-              archivePath.replace(/\\/g, "/"),
-              "-C",
-              cachePath.replace(/\\/g, "/"),
-              "."
-          ]
-        : ["-cz", "-f", archivePath, "-C", cachePath, "."];
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
+    expect(createTarMock).toHaveBeenCalledTimes(1);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );

     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);

     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

__tests__/tar.test.ts (new file, 204 lines)

import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";

import { CacheFilename, CompressionMethod } from "../src/constants";
import * as tar from "../src/tar";
import * as utils from "../src/utils/actionUtils";

import fs = require("fs");

jest.mock("@actions/exec");
jest.mock("@actions/io");

const IS_WINDOWS = process.platform === "win32";

function getTempDir(): string {
    return path.join(__dirname, "_temp", "tar");
}

beforeAll(async () => {
    jest.spyOn(io, "which").mockImplementation(tool => {
        return Promise.resolve(tool);
    });

    process.env["GITHUB_WORKSPACE"] = process.cwd();
    await jest.requireActual("@actions/io").rmRF(getTempDir());
});

afterAll(async () => {
    delete process.env["GITHUB_WORKSPACE"];
    await jest.requireActual("@actions/io").rmRF(getTempDir());
});

test("zstd extract tar", async () => {
    const mkdirMock = jest.spyOn(io, "mkdirP");
    const execMock = jest.spyOn(exec, "exec");

    const archivePath = IS_WINDOWS
        ? `${process.env["windir"]}\\fakepath\\cache.tar`
        : "cache.tar";
    const workspace = process.env["GITHUB_WORKSPACE"];

    await tar.extractTar(archivePath, CompressionMethod.Zstd);

    expect(mkdirMock).toHaveBeenCalledWith(workspace);

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "--use-compress-program",
            "zstd -d --long=30",
            "-xf",
            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
        ],
        { cwd: undefined }
    );
});

test("gzip extract tar", async () => {
    const mkdirMock = jest.spyOn(io, "mkdirP");
    const execMock = jest.spyOn(exec, "exec");

    const archivePath = IS_WINDOWS
        ? `${process.env["windir"]}\\fakepath\\cache.tar`
        : "cache.tar";
    const workspace = process.env["GITHUB_WORKSPACE"];

    await tar.extractTar(archivePath, CompressionMethod.Gzip);

    expect(mkdirMock).toHaveBeenCalledWith(workspace);

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "-z",
            "-xf",
            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
        ],
        { cwd: undefined }
    );
});

test("gzip extract GNU tar on windows", async () => {
    if (IS_WINDOWS) {
        jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);

        const isGnuMock = jest
            .spyOn(utils, "useGnuTar")
            .mockReturnValue(Promise.resolve(true));
        const execMock = jest.spyOn(exec, "exec");
        const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
        const workspace = process.env["GITHUB_WORKSPACE"];

        await tar.extractTar(archivePath, CompressionMethod.Gzip);

        expect(isGnuMock).toHaveBeenCalledTimes(1);
        expect(execMock).toHaveBeenCalledTimes(1);
        expect(execMock).toHaveBeenCalledWith(
            `"tar"`,
            [
                "-z",
                "-xf",
                archivePath.replace(/\\/g, "/"),
                "-P",
                "-C",
                workspace?.replace(/\\/g, "/"),
                "--force-local"
            ],
            { cwd: undefined }
        );
    }
});

test("zstd create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Zstd
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "--use-compress-program",
            "zstd -T0 --long=30",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Zstd.replace(/\\/g, "/")
                : CacheFilename.Zstd,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});

test("gzip create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Gzip
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "-z",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Gzip.replace(/\\/g, "/")
                : CacheFilename.Gzip,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});

36
__tests__/verify-cache-files.sh Executable file

@ -0,0 +1,36 @@
#!/bin/sh
# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
echo "Must supply prefix argument"
exit 1
fi
path="$2"
if [ -z "$path" ]; then
echo "Must specify path argument"
exit 1
fi
# Sanity check GITHUB_RUN_ID defined
if [ -z "$GITHUB_RUN_ID" ]; then
echo "GITHUB_RUN_ID not defined"
exit 1
fi
# Verify file exists
file="$path/test-file.txt"
echo "Checking for $file"
if [ ! -e "$file" ]; then
echo "File does not exist"
exit 1
fi
# Verify file content
content="$(cat "$file")"
printf "File content:\n%s\n" "$content"
if [ -z "$(echo "$content" | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then
echo "Unexpected file content"
exit 1
fi
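For reference, a workflow step could invoke this script with a prefix and the cached path (a hypothetical invocation; the prefix is whatever the companion create-cache-files.sh wrote into the files):
```yaml
- name: Verify cache files
  shell: bash
  # "cache-test" and "test-cache" are illustrative; match whatever the create step used
  run: __tests__/verify-cache-files.sh "cache-test" test-cache
```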

action.yml

@ -1,5 +1,5 @@
name: 'Cache' name: 'Cache'
description: 'Cache dependencies and build outputs to improve workflow execution time' description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub' author: 'GitHub'
inputs: inputs:
path: path:

6067
dist/restore/index.js vendored

File diff suppressed because it is too large

6056
dist/save/index.js vendored

File diff suppressed because it is too large

examples.md

@ -1,20 +1,36 @@
# Examples # Examples
- [C# - Nuget](#c---nuget) - [Examples](#examples)
- [Elixir - Mix](#elixir---mix) - [C# - NuGet](#c---nuget)
- [Go - Modules](#go---modules) - [Elixir - Mix](#elixir---mix)
- [Java - Gradle](#java---gradle) - [Go - Modules](#go---modules)
- [Java - Maven](#java---maven) - [Haskell - Cabal](#haskell---cabal)
- [Node - npm](#node---npm) - [Java - Gradle](#java---gradle)
- [Node - Yarn](#node---yarn) - [Java - Maven](#java---maven)
- [PHP - Composer](#php---composer) - [Node - npm](#node---npm)
- [Python - pip](#python---pip) - [macOS and Ubuntu](#macos-and-ubuntu)
- [Ruby - Gem](#ruby---gem) - [Windows](#windows)
- [Rust - Cargo](#rust---cargo) - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage) - [Node - Lerna](#node---lerna)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) - [Node - Yarn](#node---yarn)
- [OCaml/Reason - esy](#ocamlreason---esy)
- [PHP - Composer](#php---composer)
- [Python - pip](#python---pip)
- [Simple example](#simple-example)
- [Multiple OS's in a workflow](#multiple-oss-in-a-workflow)
- [Using pip to get cache location](#using-pip-to-get-cache-location)
- [Using a script to get cache location](#using-a-script-to-get-cache-location)
- [R - renv](#r---renv)
- [Simple example](#simple-example-1)
- [Multiple OS's in a workflow](#multiple-oss-in-a-workflow-1)
- [Ruby - Bundler](#ruby---bundler)
- [Rust - Cargo](#rust---cargo)
- [Scala - SBT](#scala---sbt)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](#swift---swift-package-manager)
## C# - Nuget ## C# - NuGet
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
```yaml ```yaml
@ -26,6 +42,21 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
${{ runner.os }}-nuget- ${{ runner.os }}-nuget-
``` ```
Depending on the environment, huge packages might be pre-installed in the global cache folder.
If you do not want to include them, consider moving the cache folder as shown below.
>Note: This workflow does not work for projects that require files to be placed in the user profile package folder.
```yaml
env:
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
- uses: actions/cache@v1
with:
path: ${{ github.workspace }}/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
## Elixir - Mix ## Elixir - Mix
```yaml ```yaml
- uses: actions/cache@v1 - uses: actions/cache@v1
@ -47,13 +78,35 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
${{ runner.os }}-go- ${{ runner.os }}-go-
``` ```
## Haskell - Cabal
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
```yaml
- uses: actions/cache@v1
name: Cache ~/.cabal/packages
with:
path: ~/.cabal/packages
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
name: Cache ~/.cabal/store
with:
path: ~/.cabal/store
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
name: Cache dist-newstyle
with:
path: dist-newstyle
key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
```
## Java - Gradle ## Java - Gradle
```yaml ```yaml
- uses: actions/cache@v1 - uses: actions/cache@v1
with: with:
path: ~/.gradle/caches path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }} key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
restore-keys: | restore-keys: |
${{ runner.os }}-gradle- ${{ runner.os }}-gradle-
``` ```
@ -89,10 +142,14 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
### Windows ### Windows
```yaml ```yaml
- name: Get npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1 - uses: actions/cache@v1
with: with:
path: ~\AppData\Roaming\npm-cache path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }} key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: | restore-keys: |
${{ runner.os }}-node- ${{ runner.os }}-node-
``` ```
@ -112,22 +169,64 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
${{ runner.os }}-node- ${{ runner.os }}-node-
``` ```
## Node - Lerna
>Note: this example uses the new multi-path feature, which is currently only available on `master`
```yaml
- name: restore lerna
uses: actions/cache@master
with:
path: |
node_modules
*/*/node_modules
key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
```
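Since the multi-path input is resolved with `@actions/glob` (see `resolvePaths` in `src/utils/actionUtils.ts` below), glob patterns should work here too. A sketch, assuming exclude patterns behave as they do in `@actions/glob`:
```yaml
# The exclude pattern is an assumption about @actions/glob support, not an official example.
- uses: actions/cache@master
  with:
    path: |
      node_modules
      */*/node_modules
      !*/*/node_modules/.cache
    key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
```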
## Node - Yarn ## Node - Yarn
The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info. The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info.
```yaml ```yaml
- name: Get yarn cache - name: Get yarn cache directory path
id: yarn-cache id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)" run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v1 - uses: actions/cache@v1
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with: with:
path: ${{ steps.yarn-cache.outputs.dir }} path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: | restore-keys: |
${{ runner.os }}-yarn- ${{ runner.os }}-yarn-
``` ```
## OCaml/Reason - esy
Esy allows you to export built dependencies and import pre-built dependencies.
```yaml
- name: Restore Cache
id: restore-cache
uses: actions/cache@v1
with:
path: _export
key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
restore-keys: |
${{ runner.os }}-esy-
- name: Esy install
run: 'esy install'
- name: Import Cache
run: |
esy import-dependencies _export
rm -rf _export
...(Build job)...
# Re-export dependencies if anything has changed or if it is the first time
- name: Setting dependency cache
run: |
esy export-dependencies
if: steps.restore-cache.outputs.cache-hit != 'true'
```
## PHP - Composer ## PHP - Composer
```yaml ```yaml
@ -192,14 +291,32 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
${{ runner.os }}-pip- ${{ runner.os }}-pip-
``` ```
### Using pip to get cache location
> Note: This requires pip 20.1+
```yaml
- name: Get pip cache dir
id: pip-cache
run: |
echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
uses: actions/cache@v1
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
### Using a script to get cache location ### Using a script to get cache location
> Note: This uses an internal pip API and may not always work > Note: This uses an internal pip API and may not always work
```yaml ```yaml
- name: Get pip cache - name: Get pip cache dir
id: pip-cache id: pip-cache
run: | run: |
python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)" python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
- uses: actions/cache@v1 - uses: actions/cache@v1
with: with:
@ -209,15 +326,64 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
${{ runner.os }}-pip- ${{ runner.os }}-pip-
``` ```
## Ruby - Gem ## R - renv
For renv, the cache directory will vary by OS. See https://rstudio.github.io/renv/articles/renv.html#cache for details.
Locations:
- Ubuntu: `~/.local/share/renv`
- macOS: `~/Library/Application Support/renv`
- Windows: `%LOCALAPPDATA%/renv`
### Simple example
```yaml
- uses: actions/cache@v1
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Application Support/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
## Ruby - Bundler
```yaml ```yaml
- uses: actions/cache@v1 - uses: actions/cache@v1
with: with:
path: vendor/bundle path: vendor/bundle
key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }} key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: | restore-keys: |
${{ runner.os }}-gem- ${{ runner.os }}-gems-
``` ```
When dependencies are installed later in the workflow, we must specify the same path for the bundler. When dependencies are installed later in the workflow, we must specify the same path for the bundler.
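For instance, a later install step could point Bundler at the same cached directory (a sketch using standard Bundler flags, not something this action mandates):
```yaml
- name: Bundle install
  run: |
    # Point Bundler at the directory restored by actions/cache above
    bundle config path vendor/bundle
    bundle install --jobs 4 --retry 3
```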
@ -248,6 +414,21 @@ When dependencies are installed later in the workflow, we must specify the same
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
``` ```
## Scala - SBT
```yaml
- name: Cache SBT ivy cache
uses: actions/cache@v1
with:
path: ~/.ivy2/cache
key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
uses: actions/cache@v1
with:
path: ~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```
## Swift, Objective-C - Carthage ## Swift, Objective-C - Carthage
```yaml ```yaml
@ -269,3 +450,14 @@ When dependencies are installed later in the workflow, we must specify the same
restore-keys: | restore-keys: |
${{ runner.os }}-pods- ${{ runner.os }}-pods-
``` ```
## Swift - Swift Package Manager
```yaml
- uses: actions/cache@v1
with:
path: .build
key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
restore-keys: |
${{ runner.os }}-spm-
```

3965
package-lock.json generated

File diff suppressed because it is too large

package.json

@ -1,16 +1,15 @@
{ {
"name": "cache", "name": "cache",
"version": "1.1.0", "version": "1.1.2",
"private": true, "private": true,
"description": "Cache dependencies and build outputs", "description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js", "main": "dist/restore/index.js",
"scripts": { "scripts": {
"build": "tsc", "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
"test": "tsc --noEmit && jest --coverage", "test": "tsc --noEmit && jest --coverage",
"lint": "eslint **/*.ts --cache", "lint": "eslint **/*.ts --cache",
"format": "prettier --write **/*.ts", "format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts", "format-check": "prettier --check **/*.ts"
"release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/"
}, },
"repository": { "repository": {
"type": "git", "type": "git",
@ -26,8 +25,9 @@
"dependencies": { "dependencies": {
"@actions/core": "^1.2.0", "@actions/core": "^1.2.0",
"@actions/exec": "^1.0.1", "@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^1.0.8",
"@actions/io": "^1.0.1", "@actions/io": "^1.0.1",
"typed-rest-client": "^1.5.0",
"uuid": "^3.3.3" "uuid": "^3.3.3"
}, },
"devDependencies": { "devDependencies": {
@ -43,6 +43,7 @@
"eslint-plugin-import": "^2.18.2", "eslint-plugin-import": "^2.18.2",
"eslint-plugin-jest": "^23.0.3", "eslint-plugin-jest": "^23.0.3",
"eslint-plugin-prettier": "^3.1.1", "eslint-plugin-prettier": "^3.1.1",
"eslint-plugin-simple-import-sort": "^5.0.2",
"jest": "^24.8.0", "jest": "^24.8.0",
"jest-circus": "^24.7.1", "jest-circus": "^24.7.1",
"nock": "^11.7.0", "nock": "^11.7.0",

src/cacheHttpClient.ts

@ -1,25 +1,48 @@
import * as core from "@actions/core"; import * as core from "@actions/core";
import * as fs from "fs"; import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "typed-rest-client/Handlers"; import { BearerCredentialHandler } from "@actions/http-client/auth";
import { HttpClient } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import { import {
IHttpClientResponse,
IRequestOptions, IRequestOptions,
RestClient, ITypedResponse
IRestResponse } from "@actions/http-client/interfaces";
} from "typed-rest-client/RestClient"; import * as crypto from "crypto";
import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";
import { CompressionMethod, DefaultSocketTimeout, Inputs } from "./constants";
import { import {
ArtifactCacheEntry, ArtifactCacheEntry,
CacheOptions,
CommitCacheRequest, CommitCacheRequest,
ReserveCacheRequest, ReserveCacheRequest,
ReserverCacheResponse ReserveCacheResponse
} from "./contracts"; } from "./contracts";
import * as utils from "./utils/actionUtils"; import * as utils from "./utils/actionUtils";
function isSuccessStatusCode(statusCode: number): boolean { const versionSalt = "1.0";
function isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300; return statusCode >= 200 && statusCode < 300;
} }
function getCacheApiUrl(): string {
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl(resource: string): string {
// Ideally we just use ACTIONS_CACHE_URL // Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = ( const baseUrl: string = (
process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_CACHE_URL"] ||
@ -32,8 +55,9 @@ function getCacheApiUrl(): string {
); );
} }
core.debug(`Cache Url: ${baseUrl}`); const url = `${baseUrl}_apis/artifactcache/${resource}`;
return `${baseUrl}_apis/artifactcache/`; core.debug(`Resource Url: ${url}`);
return url;
} }
function createAcceptHeader(type: string, apiVersion: string): string { function createAcceptHeader(type: string, apiVersion: string): string {
@ -42,30 +66,59 @@ function createAcceptHeader(type: string, apiVersion: string): string {
function getRequestOptions(): IRequestOptions { function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = { const requestOptions: IRequestOptions = {
acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") headers: {
Accept: createAcceptHeader("application/json", "6.0-preview.1")
}
}; };
return requestOptions; return requestOptions;
} }
function createRestClient(): RestClient { function createHttpClient(): HttpClient {
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token); const bearerCredentialHandler = new BearerCredentialHandler(token);
return new RestClient("actions/cache", getCacheApiUrl(), [ return new HttpClient(
bearerCredentialHandler "actions/cache",
]); [bearerCredentialHandler],
getRequestOptions()
);
}
function parseEnvNumber(key: string): number | undefined {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
export function getCacheVersion(compressionMethod?: CompressionMethod): string {
const components = [core.getInput(Inputs.Path, { required: true })].concat(
compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : []
);
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto
.createHash("sha256")
.update(components.join("|"))
.digest("hex");
} }
export async function getCacheEntry( export async function getCacheEntry(
keys: string[] keys: string[],
options?: CacheOptions
): Promise<ArtifactCacheEntry | null> { ): Promise<ArtifactCacheEntry | null> {
const restClient = createRestClient(); const httpClient = createHttpClient();
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; const version = getCacheVersion(options?.compressionMethod);
const resource = `cache?keys=${encodeURIComponent(
keys.join(",")
)}&version=${version}`;
const response = await restClient.get<ArtifactCacheEntry>( const response = await httpClient.getJson<ArtifactCacheEntry>(
resource, getCacheApiUrl(resource)
getRequestOptions()
); );
if (response.statusCode === 204) { if (response.statusCode === 204) {
return null; return null;
@ -73,6 +126,7 @@ export async function getCacheEntry(
if (!isSuccessStatusCode(response.statusCode)) { if (!isSuccessStatusCode(response.statusCode)) {
throw new Error(`Cache service responded with ${response.statusCode}`); throw new Error(`Cache service responded with ${response.statusCode}`);
} }
const cacheResult = response.result; const cacheResult = response.result;
const cacheDownloadUrl = cacheResult?.archiveLocation; const cacheDownloadUrl = cacheResult?.archiveLocation;
if (!cacheDownloadUrl) { if (!cacheDownloadUrl) {
@ -87,13 +141,10 @@ export async function getCacheEntry(
async function pipeResponseToStream( async function pipeResponseToStream(
response: IHttpClientResponse, response: IHttpClientResponse,
stream: NodeJS.WritableStream output: NodeJS.WritableStream
): Promise<void> { ): Promise<void> {
return new Promise(resolve => { const pipeline = util.promisify(stream.pipeline);
response.message.pipe(stream).on("close", () => { await pipeline(response.message, output);
resolve();
});
});
} }
export async function downloadCache( export async function downloadCache(
@ -103,22 +154,53 @@ export async function downloadCache(
const stream = fs.createWriteStream(archivePath); const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache"); const httpClient = new HttpClient("actions/cache");
const downloadResponse = await httpClient.get(archiveLocation); const downloadResponse = await httpClient.get(archiveLocation);
// Abort download if no traffic received over the socket.
const socketTimeout =
parseEnvNumber("CACHE_SOCKET_TIMEOUT") ?? DefaultSocketTimeout;
downloadResponse.message.socket.setTimeout(socketTimeout, () => {
downloadResponse.message.destroy();
core.debug(
`Aborting download, socket timed out after ${socketTimeout} ms`
);
});
await pipeResponseToStream(downloadResponse, stream); await pipeResponseToStream(downloadResponse, stream);
// Validate download size.
const contentLengthHeader =
downloadResponse.message.headers["content-length"];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSize(archivePath);
if (actualLength != expectedLength) {
throw new Error(
`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
);
}
} else {
core.debug("Unable to validate download, no Content-Length header");
}
} }
// Reserve Cache // Reserve Cache
export async function reserveCache(key: string): Promise<number> { export async function reserveCache(
const restClient = createRestClient(); key: string,
options?: CacheOptions
): Promise<number> {
const httpClient = createHttpClient();
const version = getCacheVersion(options?.compressionMethod);
const reserveCacheRequest: ReserveCacheRequest = { const reserveCacheRequest: ReserveCacheRequest = {
key key,
version
}; };
const response = await restClient.create<ReserverCacheResponse>( const response = await httpClient.postJson<ReserveCacheResponse>(
"caches", getCacheApiUrl("caches"),
reserveCacheRequest, reserveCacheRequest
getRequestOptions()
); );
return response?.result?.cacheId ?? -1; return response?.result?.cacheId ?? -1;
} }
@ -132,12 +214,12 @@ function getContentRange(start: number, end: number): string {
} }
async function uploadChunk( async function uploadChunk(
restClient: RestClient, httpClient: HttpClient,
resourceUrl: string, resourceUrl: string,
data: NodeJS.ReadableStream, data: NodeJS.ReadableStream,
start: number, start: number,
end: number end: number
): Promise<IRestResponse<void>> { ): Promise<void> {
core.debug( core.debug(
`Uploading chunk of size ${end - `Uploading chunk of size ${end -
start + start +
@ -146,92 +228,102 @@ async function uploadChunk(
end end
)}` )}`
); );
const requestOptions = getRequestOptions(); const additionalHeaders = {
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream", "Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end) "Content-Range": getContentRange(start, end)
}; };
return await restClient.uploadStream<void>( const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
"PATCH", return await httpClient.sendStream(
resourceUrl, "PATCH",
data, resourceUrl,
requestOptions data,
additionalHeaders
);
};
const response = await uploadChunkRequest();
if (isSuccessStatusCode(response.message.statusCode)) {
return;
}
if (isRetryableStatusCode(response.message.statusCode)) {
core.debug(
`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
);
const retryResponse = await uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.message.statusCode)) {
return;
}
}
throw new Error(
`Cache service responded with ${response.message.statusCode} during chunk upload.`
); );
} }
async function uploadFile( async function uploadFile(
restClient: RestClient, httpClient: HttpClient,
cacheId: number, cacheId: number,
archivePath: string archivePath: string
): Promise<void> { ): Promise<void> {
// Upload Chunks // Upload Chunks
const fileSize = fs.statSync(archivePath).size; const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const responses: IRestResponse<void>[] = [];
const fd = fs.openSync(archivePath, "r"); const fd = fs.openSync(archivePath, "r");
const concurrency = 4; // # of HTTP requests in parallel const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks const MAX_CHUNK_SIZE =
parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()]; const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads"); core.debug("Awaiting all uploads");
let offset = 0; let offset = 0;
await Promise.all(
parallelUploads.map(async () => { try {
while (offset < fileSize) { await Promise.all(
const chunkSize = parallelUploads.map(async () => {
offset + MAX_CHUNK_SIZE > fileSize while (offset < fileSize) {
? fileSize - offset const chunkSize = Math.min(
: MAX_CHUNK_SIZE; fileSize - offset,
const start = offset; MAX_CHUNK_SIZE
const end = offset + chunkSize - 1; );
offset += MAX_CHUNK_SIZE; const start = offset;
const chunk = fs.createReadStream(archivePath, { const end = offset + chunkSize - 1;
fd, offset += MAX_CHUNK_SIZE;
start, const chunk = fs.createReadStream(archivePath, {
end, fd,
autoClose: false start,
}); end,
responses.push( autoClose: false
});
await uploadChunk( await uploadChunk(
restClient, httpClient,
resourceUrl, resourceUrl,
chunk, chunk,
start, start,
end end
) );
); }
} })
})
);
fs.closeSync(fd);
const failedResponse = responses.find(
x => !isSuccessStatusCode(x.statusCode)
);
if (failedResponse) {
throw new Error(
`Cache service responded with ${failedResponse.statusCode} during chunk upload.`
); );
} finally {
fs.closeSync(fd);
} }
return; return;
} }
async function commitCache( async function commitCache(
restClient: RestClient, httpClient: HttpClient,
cacheId: number, cacheId: number,
filesize: number filesize: number
): Promise<IRestResponse<void>> { ): Promise<ITypedResponse<null>> {
const requestOptions = getRequestOptions();
const commitCacheRequest: CommitCacheRequest = { size: filesize }; const commitCacheRequest: CommitCacheRequest = { size: filesize };
return await restClient.create( return await httpClient.postJson<null>(
`caches/${cacheId.toString()}`, getCacheApiUrl(`caches/${cacheId.toString()}`),
commitCacheRequest, commitCacheRequest
requestOptions
); );
} }
@ -239,16 +331,16 @@ export async function saveCache(
cacheId: number, cacheId: number,
archivePath: string archivePath: string
): Promise<void> { ): Promise<void> {
const restClient = createRestClient(); const httpClient = createHttpClient();
core.debug("Upload cache"); core.debug("Upload cache");
await uploadFile(restClient, cacheId, archivePath); await uploadFile(httpClient, cacheId, archivePath);
// Commit Cache // Commit Cache
core.debug("Commiting cache"); core.debug("Commiting cache");
const cacheSize = utils.getArchiveFileSize(archivePath); const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = await commitCache( const commitCacheResponse = await commitCache(
restClient, httpClient,
cacheId, cacheId,
cacheSize cacheSize
); );

src/constants.ts

@ -18,3 +18,18 @@ export enum Events {
Push = "push", Push = "push",
PullRequest = "pull_request" PullRequest = "pull_request"
} }
export enum CacheFilename {
Gzip = "cache.tgz",
Zstd = "cache.tzst"
}
export enum CompressionMethod {
Gzip = "gzip",
Zstd = "zstd"
}
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const DefaultSocketTimeout = 5000;
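The download and upload knobs that `cacheHttpClient.ts` reads through `parseEnvNumber` (`CACHE_SOCKET_TIMEOUT`, `CACHE_UPLOAD_CONCURRENCY`, `CACHE_UPLOAD_CHUNK_SIZE`) can be overridden from a workflow. A minimal sketch, assuming the defaults (5000 ms, 4 parallel requests, 32 MB chunks) need raising on a particular runner; the values shown are illustrative:
```yaml
env:
  CACHE_SOCKET_TIMEOUT: 10000        # abort the download after 10 s without socket traffic
  CACHE_UPLOAD_CONCURRENCY: 8        # number of parallel PATCH requests during save
  CACHE_UPLOAD_CHUNK_SIZE: 67108864  # 64 MB upload chunks
steps:
  - uses: actions/cache@master
    with:
      path: ~/.npm
      key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
```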

8
src/contracts.d.ts vendored

@ -1,3 +1,5 @@
import { CompressionMethod } from "./constants";
export interface ArtifactCacheEntry { export interface ArtifactCacheEntry {
cacheKey?: string; cacheKey?: string;
scope?: string; scope?: string;
@ -14,6 +16,10 @@ export interface ReserveCacheRequest {
version?: string; version?: string;
} }
export interface ReserverCacheResponse { export interface ReserveCacheResponse {
cacheId: number; cacheId: number;
} }
export interface CacheOptions {
compressionMethod?: CompressionMethod;
}

src/restore.ts

@ -1,9 +1,9 @@
import * as core from "@actions/core"; import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path"; import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient"; import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants"; import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
import * as utils from "./utils/actionUtils"; import * as utils from "./utils/actionUtils";
async function run(): Promise<void> { async function run(): Promise<void> {
@ -20,11 +20,6 @@ async function run(): Promise<void> {
return; return;
} }
const cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true })
);
core.debug(`Cache Path: ${cachePath}`);
const primaryKey = core.getInput(Inputs.Key, { required: true }); const primaryKey = core.getInput(Inputs.Key, { required: true });
core.saveState(State.CacheKey, primaryKey); core.saveState(State.CacheKey, primaryKey);
@ -59,58 +54,49 @@ async function run(): Promise<void> {
} }
} }
const compressionMethod = await utils.getCompressionMethod();
try { try {
const cacheEntry = await cacheHttpClient.getCacheEntry(keys); const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
if (!cacheEntry || !cacheEntry?.archiveLocation) { compressionMethod: compressionMethod
core.info( });
`Cache not found for input keys: ${keys.join(", ")}.` if (!cacheEntry?.archiveLocation) {
); core.info(`Cache not found for input keys: ${keys.join(", ")}`);
return; return;
} }
const archivePath = path.join( const archivePath = path.join(
await utils.createTempDirectory(), await utils.createTempDirectory(),
"cache.tgz" utils.getCacheFileName(compressionMethod)
); );
core.debug(`Archive Path: ${archivePath}`); core.debug(`Archive Path: ${archivePath}`);
// Store the cache result // Store the cache result
utils.setCacheState(cacheEntry); utils.setCacheState(cacheEntry);
// Download the cache from the cache entry try {
await cacheHttpClient.downloadCache( // Download the cache from the cache entry
cacheEntry?.archiveLocation, await cacheHttpClient.downloadCache(
archivePath cacheEntry.archiveLocation,
); archivePath
);
const archiveFileSize = utils.getArchiveFileSize(archivePath); const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info( core.info(
`Cache Size: ~${Math.round( `Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024) archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)` )} MB (${archiveFileSize} B)`
); );
// Create directory to extract tar into await extractTar(archivePath, compressionMethod);
await io.mkdirP(cachePath); } finally {
// Try to delete the archive to save space
// http://man7.org/linux/man-pages/man1/tar.1.html try {
// tar [-options] <name of the tar archive> [files or directories which to add into archive] await utils.unlinkFile(archivePath);
const IS_WINDOWS = process.platform === "win32"; } catch (error) {
const args = IS_WINDOWS core.debug(`Failed to delete archive: ${error}`);
? [ }
"-xz", }
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
const isExactKeyMatch = utils.isExactKeyMatch( const isExactKeyMatch = utils.isExactKeyMatch(
primaryKey, primaryKey,

src/save.ts

@ -1,9 +1,9 @@
import * as core from "@actions/core"; import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path"; import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient"; import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants"; import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
import * as utils from "./utils/actionUtils"; import * as utils from "./utils/actionUtils";
async function run(): Promise<void> { async function run(): Promise<void> {
@ -35,58 +35,52 @@ async function run(): Promise<void> {
return; return;
} }
const compressionMethod = await utils.getCompressionMethod();
core.debug("Reserving Cache"); core.debug("Reserving Cache");
const cacheId = await cacheHttpClient.reserveCache(primaryKey); const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
if (cacheId < 0) { compressionMethod: compressionMethod
});
if (cacheId == -1) {
core.info( core.info(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
); );
return; return;
} }
core.debug(`Cache ID: ${cacheId}`); core.debug(`Cache ID: ${cacheId}`);
const cachePath = utils.resolvePath( const cachePaths = await utils.resolvePaths(
core.getInput(Inputs.Path, { required: true }) core
.getInput(Inputs.Path, { required: true })
.split("\n")
.filter(x => x !== "")
); );
core.debug(`Cache Path: ${cachePath}`);
core.debug("Cache Paths:");
core.debug(`${JSON.stringify(cachePaths)}`);
const archiveFolder = await utils.createTempDirectory();
const archivePath = path.join( const archivePath = path.join(
await utils.createTempDirectory(), archiveFolder,
"cache.tgz" utils.getCacheFileName(compressionMethod)
); );
core.debug(`Archive Path: ${archivePath}`); core.debug(`Archive Path: ${archivePath}`);
// http://man7.org/linux/man-pages/man1/tar.1.html await createTar(archiveFolder, cachePaths, compressionMethod);
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
const tarPath = await io.which("tar", true); const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath); const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.debug(`File Size: ${archiveFileSize}`); core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) { if (archiveFileSize > fileSizeLimit) {
utils.logWarning( utils.logWarning(
`Cache size of ~${Math.round( `Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024 * 1024) archiveFileSize / (1024 * 1024)
)} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.` )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
); );
return; return;
} }
core.debug("Saving Cache"); core.debug(`Saving Cache (ID: ${cacheId})`);
await cacheHttpClient.saveCache(cacheId, archivePath); await cacheHttpClient.saveCache(cacheId, archivePath);
} catch (error) { } catch (error) {
utils.logWarning(error.message); utils.logWarning(error.message);

87
src/tar.ts Normal file

@ -0,0 +1,87 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync, writeFileSync } from "fs";
import * as path from "path";
import { CompressionMethod } from "./constants";
import * as utils from "./utils/actionUtils";
async function getTarPath(args: string[]): Promise<string> {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (existsSync(systemTar)) {
return systemTar;
} else if (await utils.useGnuTar()) {
args.push("--force-local");
}
}
return await io.which("tar", true);
}
async function execTar(args: string[], cwd?: string): Promise<void> {
try {
await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`);
}
}
function getWorkingDirectory(): string {
return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}
export async function extractTar(
archivePath: string,
compressionMethod: CompressionMethod
): Promise<void> {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
await io.mkdirP(workingDirectory);
// --d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const args = [
...(compressionMethod == CompressionMethod.Zstd
? ["--use-compress-program", "zstd -d --long=30"]
: ["-z"]),
"-xf",
archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-P",
"-C",
workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
];
await execTar(args);
}
export async function createTar(
archiveFolder: string,
sourceDirectories: string[],
compressionMethod: CompressionMethod
): Promise<void> {
// Write source directories to manifest.txt to avoid command length limits
const manifestFilename = "manifest.txt";
const cacheFileName = utils.getCacheFileName(compressionMethod);
writeFileSync(
path.join(archiveFolder, manifestFilename),
sourceDirectories.join("\n")
);
// -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const workingDirectory = getWorkingDirectory();
const args = [
...(compressionMethod == CompressionMethod.Zstd
? ["--use-compress-program", "zstd -T0 --long=30"]
: ["-z"]),
"-cf",
cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-P",
"-C",
workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
"--files-from",
manifestFilename
];
await execTar(args, archiveFolder);
}

src/utils/actionUtils.ts

@ -1,11 +1,19 @@
import * as core from "@actions/core"; import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io"; import * as io from "@actions/io";
import * as fs from "fs"; import * as fs from "fs";
import * as os from "os";
import * as path from "path"; import * as path from "path";
import * as util from "util";
import * as uuidV4 from "uuid/v4"; import * as uuidV4 from "uuid/v4";
import { Events, Outputs, State } from "../constants"; import {
CacheFilename,
CompressionMethod,
Events,
Outputs,
State
} from "../constants";
import { ArtifactCacheEntry } from "../contracts"; import { ArtifactCacheEntry } from "../contracts";
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23 // From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
@ -28,6 +36,7 @@ export async function createTempDirectory(): Promise<string> {
} }
tempDirectory = path.join(baseLocation, "actions", "temp"); tempDirectory = path.join(baseLocation, "actions", "temp");
} }
const dest = path.join(tempDirectory, uuidV4.default()); const dest = path.join(tempDirectory, uuidV4.default());
await io.mkdirP(dest); await io.mkdirP(dest);
return dest; return dest;
@ -82,16 +91,21 @@ export function logWarning(message: string): void {
core.info(`${warningPrefix}${message}`); core.info(`${warningPrefix}${message}`);
} }
export function resolvePath(filePath: string): string { export async function resolvePaths(patterns: string[]): Promise<string[]> {
if (filePath[0] === "~") { const paths: string[] = [];
const home = os.homedir(); const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
if (!home) { const globber = await glob.create(patterns.join("\n"), {
throw new Error("Unable to resolve `~` to HOME"); implicitDescendants: false
} });
return path.join(home, filePath.slice(1));
for await (const file of globber.globGenerator()) {
const relativeFile = path.relative(workspace, file);
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
} }
return path.resolve(filePath); return paths;
} }
export function getSupportedEvents(): string[] { export function getSupportedEvents(): string[] {
@ -105,3 +119,48 @@ export function isValidEvent(): boolean {
const githubEvent = process.env[Events.Key] || ""; const githubEvent = process.env[Events.Key] || "";
return getSupportedEvents().includes(githubEvent); return getSupportedEvents().includes(githubEvent);
} }
export function unlinkFile(path: fs.PathLike): Promise<void> {
return util.promisify(fs.unlink)(path);
}
async function getVersion(app: string): Promise<string> {
core.debug(`Checking ${app} --version`);
let versionOutput = "";
try {
await exec.exec(`${app} --version`, [], {
ignoreReturnCode: true,
silent: true,
listeners: {
stdout: (data: Buffer): string =>
(versionOutput += data.toString()),
stderr: (data: Buffer): string =>
(versionOutput += data.toString())
}
});
} catch (err) {
core.debug(err.message);
}
versionOutput = versionOutput.trim();
core.debug(versionOutput);
return versionOutput;
}
export async function getCompressionMethod(): Promise<CompressionMethod> {
const versionOutput = await getVersion("zstd");
return versionOutput.toLowerCase().includes("zstd command line interface")
? CompressionMethod.Zstd
: CompressionMethod.Gzip;
}
export function getCacheFileName(compressionMethod: CompressionMethod): string {
return compressionMethod == CompressionMethod.Zstd
? CacheFilename.Zstd
: CacheFilename.Gzip;
}
export async function useGnuTar(): Promise<boolean> {
const versionOutput = await getVersion("tar");
return versionOutput.toLowerCase().includes("gnu tar");
}
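`getCompressionMethod` prefers zstd only when a `zstd` binary responds to `--version` with the expected banner, falling back to gzip otherwise. On a self-hosted Linux runner, a plausible way to opt into the smaller zstd archives is to install the tool before the cache step (a sketch; the package name assumes a Debian/Ubuntu image):
```yaml
steps:
  - name: Install zstd so the cache step can prefer it over gzip
    run: sudo apt-get update && sudo apt-get install -y zstd
  - uses: actions/cache@master
    with:
      path: ~/.npm
      key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
```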