Mirror of https://github.com/actions/cache.git
Synced 2025-06-24 19:31:10 +02:00

Compare commits: timeout-en...v2.1.0
44 commits
Commit SHAs:

d29c1df198
d59a1464f2
481a91ba10
4978dc4f31
66891cb075
5c77066753
24fb121989
55bbcc2eab
c5475843b3
0065ae9713
4aa79d91d3
5474af707f
591646a21e
eed9cfe64d
b773382817
984ce638f0
ff937cc950
d60d2bef10
e561127c3e
b8204782bb
e6c708b5ce
581312be20
9ab95382c8
6c7d57dc97
2b83e91661
1034aaeec8
bcc23b930f
249a22026d
7f9517a009
16a133d9a7
46fead7f5e
bac1a40c81
916cc60b3c
4967c8e6c5
a0024e2bd0
5ddc028cc8
05b13411a0
e756b19f93
354f70a56c
ddc4681e8d
29b4783cc7
2403bbedac
ccc66f769e
5d8c995f20
.github/workflows/codeql.yml (vendored): 33 changes
@@ -1,24 +1,30 @@
-name: "Code Scanning - Action"
+name: "Code scanning - action"

 on:
   push:
+  pull_request:
   schedule:
-    - cron: '0 0 * * 0'
+    - cron: '0 19 * * 0'

 jobs:
   CodeQL-Build:

+    strategy:
+      fail-fast: false

-    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
+    # CodeQL runs on ubuntu-latest and windows-latest
     runs-on: ubuntu-latest

     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
+      with:
+        # We must fetch at least the immediate parents so that if this is
+        # a pull request then we can checkout the head.
+        fetch-depth: 2
+
+    # If this run was triggered by a pull request event, then checkout
+    # the head of the pull request instead of the merge commit.
+    - run: git checkout HEAD^2
+      if: ${{ github.event_name == 'pull_request' }}

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
       uses: github/codeql-action/init@v1
@@ -27,9 +33,20 @@ jobs:
       # languages: go, javascript, csharp, python, cpp, java

     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
-    # If this step fails, then you should remove it and run the build manually (see below).
+    # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
       uses: github/codeql-action/autobuild@v1

+    # ℹ️ Command-line programs to run using the OS shell.
+    # 📚 https://git.io/JvXDl
+
+    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
+    #    and modify them (or add more) to build your code if your project
+    #    uses a compiled language
+
+    #- run: |
+    #   make bootstrap
+    #   make release
+
     - name: Perform CodeQL Analysis
       uses: github/codeql-action/analyze@v1
.github/workflows/workflow.yml (vendored): 25 changes
@@ -3,12 +3,14 @@ name: Tests
 on:
   pull_request:
     branches:
-      - master
+      - main
       - releases/**
     paths-ignore:
       - '**.md'
   push:
     branches:
-      - master
+      - main
       - releases/**
     paths-ignore:
       - '**.md'

@@ -17,7 +19,7 @@ jobs:
   build:
     strategy:
       matrix:
-        os: [ubuntu-latest, windows-latest, macOS-latest]
+        os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
       fail-fast: false
     runs-on: ${{ matrix.os }}
     steps:
@@ -32,7 +34,7 @@ jobs:
        run: |
          echo "::set-output name=dir::$(npm config get cache)"
      - name: Restore npm cache
-       uses: actions/cache@v1
+       uses: actions/cache@v2
        with:
          path: ${{ steps.npm-cache.outputs.dir }}
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -45,12 +47,23 @@ jobs:
        run: npm run lint
      - name: Build & Test
        run: npm run test
+     - name: Ensure dist/ folder is up-to-date
+       if: ${{ runner.os == 'Linux' }}
+       shell: bash
+       run: |
+         npm run build
+         if [ "$(git diff --ignore-space-at-eol | wc -l)" -gt "0" ]; then
+           echo "Detected uncommitted changes after build. See status below:"
+           git diff
+           exit 1
+         fi

   # End to end save and restore
   test-save:
     strategy:
       matrix:
-        os: [ubuntu-latest, windows-latest, macOS-latest]
+        os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
       fail-fast: false
     runs-on: ${{ matrix.os }}
     steps:
@@ -73,7 +86,7 @@ jobs:
     needs: test-save
     strategy:
       matrix:
-        os: [ubuntu-latest, windows-latest, macOS-latest]
+        os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
       fail-fast: false
     runs-on: ${{ matrix.os }}
     steps:
README.md: 73 changes
@@ -2,12 +2,34 @@

 This action allows caching dependencies and build outputs to improve workflow execution time.

-<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>
+<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=main&event=push"></a>

 ## Documentation

 See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).

+## What's New
+
+* Added support for multiple paths, [glob patterns](https://github.com/actions/toolkit/tree/main/packages/glob), and single file caches.
+
+```yaml
+- name: Cache multiple paths
+  uses: actions/cache@v2
+  with:
+    path: |
+      ~/cache
+      !~/cache/exclude
+      **/node_modules
+    key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
+```
+
+* Increased performance and improved cache sizes using `zstd` compression for Linux and macOS runners
+* Allowed caching for all events with a ref. See [events that trigger workflow](https://help.github.com/en/actions/reference/events-that-trigger-workflows) for info on which events do not have a `GITHUB_REF`
+* Released the [`@actions/cache`](https://github.com/actions/toolkit/tree/main/packages/cache) npm package to allow other actions to utilize caching
+* Added a best-effort cleanup step to delete the archive after extraction to reduce storage space
+
+Refer [here](https://github.com/actions/cache/blob/v1/README.md) for previous versions
+
 ## Usage

 ### Pre-requisites
@@ -15,7 +37,7 @@ Create a workflow `.yml` file in your repositories `.github/workflows` directory

 ### Inputs

-* `path` - A directory to store and save the cache
+* `path` - A list of files, directories, and wildcard patterns to cache and restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns.
 * `key` - An explicit key for restoring and saving the cache
 * `restore-keys` - An ordered list of keys to use for restoring the cache if no cache hit occurred for key

@@ -25,6 +47,11 @@ Create a workflow `.yml` file in your repositories `.github/workflows` directory

 > See [Skipping steps based on cache-hit](#Skipping-steps-based-on-cache-hit) for info on using this output

+### Cache scopes
+The cache is scoped to the key and branch. The default branch cache is available to other branches.
+
+See [Matching a cache key](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key) for more info.
+
 ### Example workflow

 ```yaml
@@ -41,7 +68,7 @@ jobs:

     - name: Cache Primes
       id: cache-primes
-      uses: actions/cache@v1
+      uses: actions/cache@v2
       with:
         path: prime-numbers
         key: ${{ runner.os }}-primes
@@ -61,6 +88,7 @@ Every programming language and framework has its own way of caching.
 See [Examples](examples.md) for a list of `actions/cache` implementations for use with:

 - [C# - Nuget](./examples.md#c---nuget)
+- [D - DUB](./examples.md#d---dub)
 - [Elixir - Mix](./examples.md#elixir---mix)
 - [Go - Modules](./examples.md#go---modules)
 - [Haskell - Cabal](./examples.md#haskell---cabal)
@@ -69,7 +97,7 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
 - [Node - npm](./examples.md#node---npm)
 - [Node - Lerna](./examples.md#node---lerna)
 - [Node - Yarn](./examples.md#node---yarn)
-- [OCaml/Reason - esy](./examples.md##ocamlreason---esy)
+- [OCaml/Reason - esy](./examples.md#ocamlreason---esy)
 - [PHP - Composer](./examples.md#php---composer)
 - [Python - pip](./examples.md#python---pip)
 - [R - renv](./examples.md#r---renv)
@@ -80,6 +108,39 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
 - [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
 - [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)

+## Creating a cache key
+
+A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions.
+
+For example, using the [`hashFiles`](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles) function allows you to create a new cache when dependencies change.
+
+```yaml
+- uses: actions/cache@v2
+  with:
+    path: |
+      path/to/dependencies
+      some/other/dependencies
+    key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
+```
+
+Additionally, you can use arbitrary command output in a cache key, such as a date or software version:
+
+```yaml
+# http://man7.org/linux/man-pages/man1/date.1.html
+- name: Get Date
+  id: get-date
+  run: |
+    echo "::set-output name=date::$(/bin/date -u "+%Y%m%d")"
+  shell: bash
+
+- uses: actions/cache@v2
+  with:
+    path: path/to/dependencies
+    key: ${{ runner.os }}-${{ steps.get-date.outputs.date }}-${{ hashFiles('**/lockfiles') }}
+```
+
+See [Using contexts to create cache keys](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#using-contexts-to-create-cache-keys)
+
 ## Cache Limits

 A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
@@ -93,7 +154,7 @@ Example:
 steps:
   - uses: actions/checkout@v2

-  - uses: actions/cache@v1
+  - uses: actions/cache@v2
     id: cache
     with:
       path: path/to/dependencies
@@ -107,7 +168,7 @@ steps:
 > Note: The `id` defined in `actions/cache` must match the `id` in the `if` statement (i.e. `steps.[ID].outputs.cache-hit`)

 ## Contributing
-We would love for you to contribute to `@actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.
+We would love for you to contribute to `actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.

 ## License
 The scripts and documentation in this project are released under the [MIT License](LICENSE)
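The `cache-hit` output documented above comes down to one comparison: the key that was actually restored versus the requested primary key. A minimal sketch of that check, assuming the `isExactKeyMatch(key, cacheKey)` helper shape exercised by the test diff below (the case-insensitive but accent-sensitive behavior is exactly what those tests assert; the actual source may differ):

```ts
// Sketch: `cache-hit` is "true" only when the restored key matches the
// requested primary key exactly, ignoring case but not accents.
export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
    return !!(
        cacheKey &&
        cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0
    );
}
```
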
@@ -1,97 +1,72 @@
 import * as core from "@actions/core";
-import * as io from "@actions/io";
-import { promises as fs } from "fs";
-import * as os from "os";
-import * as path from "path";

-import { Events, Outputs, State } from "../src/constants";
-import { ArtifactCacheEntry } from "../src/contracts";
+import { Events, Outputs, RefKey, State } from "../src/constants";
 import * as actionUtils from "../src/utils/actionUtils";

-import uuid = require("uuid");
+import * as testUtils from "../src/utils/testUtils";

 jest.mock("@actions/core");
-jest.mock("os");

-function getTempDir(): string {
-    return path.join(__dirname, "_temp", "actionUtils");
-}
+beforeAll(() => {
+    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
+        return jest.requireActual("@actions/core").getInput(name, options);
+    });
+});

 afterEach(() => {
     delete process.env[Events.Key];
+    delete process.env[RefKey];
 });

-afterAll(async () => {
-    delete process.env["GITHUB_WORKSPACE"];
-    await io.rmRF(getTempDir());
-});
-
-test("getArchiveFileSize returns file size", () => {
-    const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
-
-    const size = actionUtils.getArchiveFileSize(filePath);
-
-    expect(size).toBe(11);
-});
-
-test("isExactKeyMatch with undefined cache entry returns false", () => {
+test("isExactKeyMatch with undefined cache key returns false", () => {
     const key = "linux-rust";
-    const cacheEntry = undefined;
+    const cacheKey = undefined;

-    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
+    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
 });

-test("isExactKeyMatch with empty cache entry returns false", () => {
+test("isExactKeyMatch with empty cache key returns false", () => {
     const key = "linux-rust";
-    const cacheEntry: ArtifactCacheEntry = {};
+    const cacheKey = "";

-    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
+    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
 });

 test("isExactKeyMatch with different keys returns false", () => {
     const key = "linux-rust";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "linux-"
-    };
+    const cacheKey = "linux-";

-    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
+    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
 });

 test("isExactKeyMatch with different key accents returns false", () => {
     const key = "linux-áccent";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "linux-accent"
-    };
+    const cacheKey = "linux-accent";

-    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
+    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
 });

 test("isExactKeyMatch with same key returns true", () => {
     const key = "linux-rust";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "linux-rust"
-    };
+    const cacheKey = "linux-rust";

-    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
+    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true);
 });

 test("isExactKeyMatch with same key and different casing returns true", () => {
     const key = "linux-rust";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "LINUX-RUST"
-    };
+    const cacheKey = "LINUX-RUST";

-    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
+    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true);
 });

 test("setOutputAndState with undefined entry to set cache-hit output", () => {
     const key = "linux-rust";
-    const cacheEntry = undefined;
+    const cacheKey = undefined;

     const setOutputMock = jest.spyOn(core, "setOutput");
     const saveStateMock = jest.spyOn(core, "saveState");

-    actionUtils.setOutputAndState(key, cacheEntry);
+    actionUtils.setOutputAndState(key, cacheKey);

     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
     expect(setOutputMock).toHaveBeenCalledTimes(1);
@@ -101,43 +76,33 @@ test("setOutputAndState with undefined entry to set cache-hit output", () => {

 test("setOutputAndState with exact match to set cache-hit output and state", () => {
     const key = "linux-rust";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "linux-rust"
-    };
+    const cacheKey = "linux-rust";

     const setOutputMock = jest.spyOn(core, "setOutput");
     const saveStateMock = jest.spyOn(core, "saveState");

-    actionUtils.setOutputAndState(key, cacheEntry);
+    actionUtils.setOutputAndState(key, cacheKey);

     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true");
     expect(setOutputMock).toHaveBeenCalledTimes(1);

-    expect(saveStateMock).toHaveBeenCalledWith(
-        State.CacheResult,
-        JSON.stringify(cacheEntry)
-    );
+    expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
     expect(saveStateMock).toHaveBeenCalledTimes(1);
 });

 test("setOutputAndState with no exact match to set cache-hit output and state", () => {
     const key = "linux-rust";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43"
-    };
+    const cacheKey = "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43";

     const setOutputMock = jest.spyOn(core, "setOutput");
     const saveStateMock = jest.spyOn(core, "saveState");

-    actionUtils.setOutputAndState(key, cacheEntry);
+    actionUtils.setOutputAndState(key, cacheKey);

     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
     expect(setOutputMock).toHaveBeenCalledTimes(1);

-    expect(saveStateMock).toHaveBeenCalledWith(
-        State.CacheResult,
-        JSON.stringify(cacheEntry)
-    );
+    expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
     expect(saveStateMock).toHaveBeenCalledTimes(1);
 });

@@ -151,27 +116,23 @@ test("getCacheState with no state returns undefined", () => {

     expect(state).toBe(undefined);

-    expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
+    expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
     expect(getStateMock).toHaveBeenCalledTimes(1);
 });

 test("getCacheState with valid state", () => {
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
-        scope: "refs/heads/master",
-        creationTime: "2019-11-13T19:18:02+00:00",
-        archiveLocation: "www.actionscache.test/download"
-    };
+    const cacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";

     const getStateMock = jest.spyOn(core, "getState");
     getStateMock.mockImplementation(() => {
-        return JSON.stringify(cacheEntry);
+        return cacheKey;
     });

     const state = actionUtils.getCacheState();

-    expect(state).toEqual(cacheEntry);
+    expect(state).toEqual(cacheKey);

-    expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
+    expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
     expect(getStateMock).toHaveBeenCalledTimes(1);
 });

@@ -185,7 +146,7 @@ test("logWarning logs a message with a warning prefix", () => {
     expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`);
 });

-test("isValidEvent returns false for unknown event", () => {
+test("isValidEvent returns false for event that does not have a branch or tag", () => {
     const event = "foo";
     process.env[Events.Key] = event;

@@ -194,164 +155,42 @@ test("isValidEvent returns false for unknown event", () => {
     expect(isValidEvent).toBe(false);
 });

-test("resolvePaths with no ~ in path", async () => {
-    const filePath = ".cache";
-
-    // Create the following layout:
-    //   cwd
-    //   cwd/.cache
-    //   cwd/.cache/file.txt
-
-    const root = path.join(getTempDir(), "no-tilde");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    const cache = path.join(root, ".cache");
-    await fs.mkdir(cache, { recursive: true });
-    await fs.writeFile(path.join(cache, "file.txt"), "cached");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths([filePath]);
-
-        const expectedPath = [filePath];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
-
-test("resolvePaths with ~ in path", async () => {
-    const cacheDir = uuid();
-    const filePath = `~/${cacheDir}`;
-    // Create the following layout:
-    //   ~/uuid
-    //   ~/uuid/file.txt
-
-    const homedir = jest.requireActual("os").homedir();
-    const homedirMock = jest.spyOn(os, "homedir");
-    homedirMock.mockImplementation(() => {
-        return homedir;
-    });
-
-    const target = path.join(homedir, cacheDir);
-    await fs.mkdir(target, { recursive: true });
-    await fs.writeFile(path.join(target, "file.txt"), "cached");
-
-    const root = getTempDir();
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    try {
-        const resolvedPath = await actionUtils.resolvePaths([filePath]);
-
-        const expectedPath = [path.relative(root, target)];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        await io.rmRF(target);
-    }
-});
-
-test("resolvePaths with home not found", async () => {
-    const filePath = "~/.cache/yarn";
-    const homedirMock = jest.spyOn(os, "homedir");
-    homedirMock.mockImplementation(() => {
-        return "";
-    });
-
-    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
-        "Unable to determine HOME directory"
-    );
-});
-
-test("resolvePaths inclusion pattern returns found", async () => {
-    const pattern = "*.ts";
-    // Create the following layout:
-    //   inclusion-patterns
-    //   inclusion-patterns/miss.txt
-    //   inclusion-patterns/test.ts
-
-    const root = path.join(getTempDir(), "inclusion-patterns");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    await fs.writeFile(path.join(root, "miss.txt"), "no match");
-    await fs.writeFile(path.join(root, "test.ts"), "match");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths([pattern]);
-
-        const expectedPath = ["test.ts"];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
-
-test("resolvePaths exclusion pattern returns not found", async () => {
-    const patterns = ["*.ts", "!test.ts"];
-    // Create the following layout:
-    //   exclusion-patterns
-    //   exclusion-patterns/miss.txt
-    //   exclusion-patterns/test.ts
-
-    const root = path.join(getTempDir(), "exclusion-patterns");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    await fs.writeFile(path.join(root, "miss.txt"), "no match");
-    await fs.writeFile(path.join(root, "test.ts"), "no match");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths(patterns);
-
-        const expectedPath = [];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
-
-test("isValidEvent returns true for push event", () => {
+test("isValidEvent returns true for event that has a ref", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
+    process.env[RefKey] = "ref/heads/feature";

     const isValidEvent = actionUtils.isValidEvent();

     expect(isValidEvent).toBe(true);
 });

-test("isValidEvent returns true for pull request event", () => {
-    const event = Events.PullRequest;
-    process.env[Events.Key] = event;
-
-    const isValidEvent = actionUtils.isValidEvent();
-
-    expect(isValidEvent).toBe(true);
+test("getInputAsArray returns empty array if not required and missing", () => {
+    expect(actionUtils.getInputAsArray("foo")).toEqual([]);
 });

-test("unlinkFile unlinks file", async () => {
-    const testDirectory = await fs.mkdtemp("unlinkFileTest");
-    const testFile = path.join(testDirectory, "test.txt");
-    await fs.writeFile(testFile, "hello world");
-
-    await actionUtils.unlinkFile(testFile);
-
-    // This should throw as testFile should not exist
-    await expect(fs.stat(testFile)).rejects.toThrow();
-
-    await fs.rmdir(testDirectory);
+test("getInputAsArray throws error if required and missing", () => {
+    expect(() =>
+        actionUtils.getInputAsArray("foo", { required: true })
+    ).toThrowError();
 });

+test("getInputAsArray handles single line correctly", () => {
+    testUtils.setInput("foo", "bar");
+    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar"]);
+});
+
+test("getInputAsArray handles multiple lines correctly", () => {
+    testUtils.setInput("foo", "bar\nbaz");
+    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
+});
+
+test("getInputAsArray handles different new lines correctly", () => {
+    testUtils.setInput("foo", "bar\r\nbaz");
+    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
+});
+
+test("getInputAsArray handles empty lines correctly", () => {
+    testUtils.setInput("foo", "\n\nbar\n\nbaz\n\n");
+    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
+});
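The added `getInputAsArray` tests pin down how multi-line inputs are parsed, and the reworked `isValidEvent` tests tie event validity to the presence of a ref rather than to a whitelist of event names. A sketch consistent with those tests, assuming `RefKey` names the `GITHUB_REF` environment variable (the real helpers live in `src/utils/actionUtils.ts` and may differ in detail):

```ts
import * as core from "@actions/core";

// Assumed from the tests: RefKey names the env var that carries the ref.
const RefKey = "GITHUB_REF";

// Split a multi-line input into trimmed, non-empty entries. core.getInput
// itself throws when `required` is set and the input is missing, which is
// the behavior the "required and missing" test depends on.
export function getInputAsArray(
    name: string,
    options?: core.InputOptions
): string[] {
    return core
        .getInput(name, options)
        .split("\n")
        .map(s => s.trim())
        .filter(x => x !== "");
}

// An event is cacheable when it carries a branch or tag ref.
export function isValidEvent(): boolean {
    return RefKey in process.env && Boolean(process.env[RefKey]);
}
```
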
@@ -1,39 +0,0 @@
-import { getCacheVersion } from "../src/cacheHttpClient";
-import { CompressionMethod, Inputs } from "../src/constants";
-import * as testUtils from "../src/utils/testUtils";
-
-afterEach(() => {
-    testUtils.clearInputs();
-});
-
-test("getCacheVersion with path input and compression method undefined returns version", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-
-    const result = getCacheVersion();
-
-    expect(result).toEqual(
-        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
-    );
-});
-
-test("getCacheVersion with zstd compression returns version", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-    const result = getCacheVersion(CompressionMethod.Zstd);
-
-    expect(result).toEqual(
-        "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
-    );
-});
-
-test("getCacheVersion with gzip compression does not change vesion", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-    const result = getCacheVersion(CompressionMethod.Gzip);
-
-    expect(result).toEqual(
-        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
-    );
-});
-
-test("getCacheVersion with no input throws", async () => {
-    expect(() => getCacheVersion()).toThrow();
-});
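The deleted `getCacheVersion` tests describe a version string that is stable for gzip but changes when `zstd` is in play. A minimal sketch of that behavior under stated assumptions (hashing the `path` input and appending the compression method only for zstd; the deleted source, now superseded by the `@actions/cache` package, may have differed in detail):

```ts
import * as core from "@actions/core";
import * as crypto from "crypto";

// Assumed shape of the compression identifiers used by the deleted tests.
type CompressionMethod = "gzip" | "zstd";

// Version the cache by hashing the path input; mix in the compression
// method only when it changes the archive format (zstd), so gzip and
// "no compression specified" hash to the same version.
export function getCacheVersion(
    compressionMethod?: CompressionMethod
): string {
    const components = [core.getInput("path", { required: true })].concat(
        compressionMethod === "zstd" ? [compressionMethod] : []
    );
    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}
```
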
@@ -1,21 +1,11 @@
+import * as cache from "@actions/cache";
 import * as core from "@actions/core";
-import * as path from "path";

-import * as cacheHttpClient from "../src/cacheHttpClient";
-import {
-    CacheFilename,
-    CompressionMethod,
-    Events,
-    Inputs
-} from "../src/constants";
-import { ArtifactCacheEntry } from "../src/contracts";
+import { Events, Inputs, RefKey } from "../src/constants";
 import run from "../src/restore";
-import * as tar from "../src/tar";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

-jest.mock("../src/cacheHttpClient");
-jest.mock("../src/tar");
 jest.mock("../src/utils/actionUtils");

 beforeAll(() => {
@@ -31,24 +21,23 @@ beforeAll(() => {
         return actualUtils.isValidEvent();
     });

-    jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getSupportedEvents();
-    });
-
-    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getCacheFileName(cm);
-    });
+    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
+        (name, options) => {
+            const actualUtils = jest.requireActual("../src/utils/actionUtils");
+            return actualUtils.getInputAsArray(name, options);
+        }
+    );
 });

 beforeEach(() => {
     process.env[Events.Key] = Events.Push;
+    process.env[RefKey] = "refs/heads/feature-branch";
 });

 afterEach(() => {
     testUtils.clearInputs();
     delete process.env[Events.Key];
+    delete process.env[RefKey];
 });

 test("restore with invalid event outputs warning", async () => {
@@ -56,16 +45,19 @@ test("restore with invalid event outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     const invalidEvent = "commit_comment";
     process.env[Events.Key] = invalidEvent;
+    delete process.env[RefKey];
     await run();
     expect(logWarningMock).toHaveBeenCalledWith(
-        `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
+        `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
 });

 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
+    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
+    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     // this input isn't necessary for restore b/c tarball contains entries relative to workspace
     expect(failedMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
@@ -75,71 +67,89 @@ test("restore with no path should fail", async () => {
 test("restore with no key", async () => {
     testUtils.setInput(Inputs.Path, "node_modules");
     const failedMock = jest.spyOn(core, "setFailed");
+    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
+    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledWith(
         "Input required and not supplied: key"
     );
 });

 test("restore with too many keys should fail", async () => {
+    const path = "node_modules";
     const key = "node-test";
     const restoreKeys = [...Array(20).keys()].map(x => x.toString());
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
         key,
         restoreKeys
     });
     const failedMock = jest.spyOn(core, "setFailed");
+    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: Keys are limited to a maximum of 10.`
     );
 });

 test("restore with large key should fail", async () => {
+    const path = "node_modules";
     const key = "foo".repeat(512); // Over the 512 character limit
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
        key
     });
     const failedMock = jest.spyOn(core, "setFailed");
+    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: ${key} cannot be larger than 512 characters.`
     );
 });

 test("restore with invalid key should fail", async () => {
+    const path = "node_modules";
     const key = "comma,comma";
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
         key
     });
     const failedMock = jest.spyOn(core, "setFailed");
+    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: ${key} cannot contain commas.`
     );
 });

 test("restore with no cache found", async () => {
+    const path = "node_modules";
     const key = "node-test";
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
         key
     });

     const infoMock = jest.spyOn(core, "info");
     const failedMock = jest.spyOn(core, "setFailed");
     const stateMock = jest.spyOn(core, "saveState");

-    const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
-    clientMock.mockImplementation(() => {
-        return Promise.resolve(null);
-    });
+    const restoreCacheMock = jest
+        .spyOn(cache, "restoreCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(undefined);
+        });

     await run();

+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
+
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(failedMock).toHaveBeenCalledTimes(0);

@@ -149,25 +159,28 @@ test("restore with no cache found", async () => {
 });

 test("restore with server error should fail", async () => {
+    const path = "node_modules";
     const key = "node-test";
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
         key
     });

     const logWarningMock = jest.spyOn(actionUtils, "logWarning");
     const failedMock = jest.spyOn(core, "setFailed");
     const stateMock = jest.spyOn(core, "saveState");

-    const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
-    clientMock.mockImplementation(() => {
-        throw new Error("HTTP Error Occurred");
-    });
+    const restoreCacheMock = jest
+        .spyOn(cache, "restoreCache")
+        .mockImplementationOnce(() => {
+            throw new Error("HTTP Error Occurred");
+        });
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");

     await run();

+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
+
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);

     expect(logWarningMock).toHaveBeenCalledTimes(1);

@@ -180,10 +193,11 @@ test("restore with server error should fail", async () => {
 });

 test("restore with restore keys and no cache found", async () => {
+    const path = "node_modules";
     const key = "node-test";
     const restoreKey = "node-";
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
         key,
         restoreKeys: [restoreKey]
     });

@@ -191,14 +205,17 @@ test("restore with restore keys and no cache found", async () => {
     const infoMock = jest.spyOn(core, "info");
     const failedMock = jest.spyOn(core, "setFailed");
     const stateMock = jest.spyOn(core, "saveState");

-    const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
-    clientMock.mockImplementation(() => {
-        return Promise.resolve(null);
-    });
+    const restoreCacheMock = jest
+        .spyOn(cache, "restoreCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(undefined);
+        });

     await run();

+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
+
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(failedMock).toHaveBeenCalledTimes(0);

@@ -207,161 +224,43 @@ test("restore with restore keys and no cache found", async () => {
     );
 });

-test("restore with gzip compressed cache found", async () => {
+test("restore with cache found for key", async () => {
+    const path = "node_modules";
     const key = "node-test";
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
         key
     });

     const infoMock = jest.spyOn(core, "info");
     const failedMock = jest.spyOn(core, "setFailed");
     const stateMock = jest.spyOn(core, "saveState");

-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: key,
-        scope: "refs/heads/master",
-        archiveLocation: "www.actionscache.test/download"
-    };
-    const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
-    getCacheMock.mockImplementation(() => {
-        return Promise.resolve(cacheEntry);
-    });
-    const tempPath = "/foo/bar";
-
-    const createTempDirectoryMock = jest.spyOn(
-        actionUtils,
-        "createTempDirectory"
-    );
-    createTempDirectoryMock.mockImplementation(() => {
-        return Promise.resolve(tempPath);
-    });
-
-    const archivePath = path.join(tempPath, CacheFilename.Gzip);
-    const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
-    const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
-
-    const fileSize = 142;
-    const getArchiveFileSizeMock = jest
-        .spyOn(actionUtils, "getArchiveFileSize")
-        .mockReturnValue(fileSize);
-
-    const extractTarMock = jest.spyOn(tar, "extractTar");
-    const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-
-    const compression = CompressionMethod.Gzip;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
+    const restoreCacheMock = jest
+        .spyOn(cache, "restoreCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(key);
+        });

     await run();

+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
+
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key], {
-        compressionMethod: compression
-    });
-    expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
-    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(
-        cacheEntry.archiveLocation,
-        archivePath
-    );
-    expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
-
-    expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
-
-    expect(unlinkFileMock).toHaveBeenCalledTimes(1);
-    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
-
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);

     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

-test("restore with a pull request event and zstd compressed cache found", async () => {
-    const key = "node-test";
-    testUtils.setInputs({
-        path: "node_modules",
-        key
-    });
-
-    process.env[Events.Key] = Events.PullRequest;
-
-    const infoMock = jest.spyOn(core, "info");
-    const failedMock = jest.spyOn(core, "setFailed");
-    const stateMock = jest.spyOn(core, "saveState");
-
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: key,
-        scope: "refs/heads/master",
-        archiveLocation: "www.actionscache.test/download"
-    };
-    const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
-    getCacheMock.mockImplementation(() => {
-        return Promise.resolve(cacheEntry);
-    });
-    const tempPath = "/foo/bar";
-
-    const createTempDirectoryMock = jest.spyOn(
-        actionUtils,
-        "createTempDirectory"
-    );
-    createTempDirectoryMock.mockImplementation(() => {
-        return Promise.resolve(tempPath);
-    });
-
-    const archivePath = path.join(tempPath, CacheFilename.Zstd);
-    const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
-    const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
-
-    const fileSize = 62915000;
-    const getArchiveFileSizeMock = jest
-        .spyOn(actionUtils, "getArchiveFileSize")
-        .mockReturnValue(fileSize);
-
-    const extractTarMock = jest.spyOn(tar, "extractTar");
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
-    await run();
-
-    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key], {
-        compressionMethod: compression
-    });
-    expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
-    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(
-        cacheEntry.archiveLocation,
-        archivePath
-    );
-    expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
-    expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
-
-    expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
-
-    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
-
-    expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
-    expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
-});
-
 test("restore with cache found for restore key", async () => {
+    const path = "node_modules";
     const key = "node-test";
     const restoreKey = "node-";
     testUtils.setInputs({
-        path: "node_modules",
+        path: path,
         key,
         restoreKeys: [restoreKey]
     });

@@ -369,60 +268,19 @@ test("restore with cache found for restore key", async () => {
     const infoMock = jest.spyOn(core, "info");
     const failedMock = jest.spyOn(core, "setFailed");
     const stateMock = jest.spyOn(core, "saveState");

-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: restoreKey,
-        scope: "refs/heads/master",
-        archiveLocation: "www.actionscache.test/download"
-    };
-    const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
-    getCacheMock.mockImplementation(() => {
-        return Promise.resolve(cacheEntry);
-    });
-    const tempPath = "/foo/bar";
-
-    const createTempDirectoryMock = jest.spyOn(
-        actionUtils,
-        "createTempDirectory"
-    );
-    createTempDirectoryMock.mockImplementation(() => {
-        return Promise.resolve(tempPath);
-    });
-
-    const archivePath = path.join(tempPath, CacheFilename.Zstd);
-    const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
-    const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
-
-    const fileSize = 142;
-    const getArchiveFileSizeMock = jest
-        .spyOn(actionUtils, "getArchiveFileSize")
-        .mockReturnValue(fileSize);
-
-    const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
+    const restoreCacheMock = jest
+        .spyOn(cache, "restoreCache")
+        .mockImplementationOnce(() => {
+            return Promise.resolve(restoreKey);
+        });

     await run();

+    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
+    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
+
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
-        compressionMethod: compression
-    });
-    expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
-    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(
-        cacheEntry.archiveLocation,
-        archivePath
-    );
-    expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
-    expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
-
-    expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
-
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);

@@ -430,5 +288,4 @@ test("restore with cache found for restore key", async () => {
         `Cache restored from key: ${restoreKey}`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
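Taken together, these restore tests outline what the reworked entry point must do: parse the inputs, write the `CACHE_KEY` state, and delegate download and extraction to the `@actions/cache` package. A rough, simplified sketch of `src/restore.ts` consistent with those mocks (helper calls are inlined, the strict key comparison stands in for the `isExactKeyMatch` helper, and names other than `CACHE_KEY` are assumptions; the actual source may differ):

```ts
import * as cache from "@actions/cache";
import * as core from "@actions/core";

// Simplified sketch of the v2 restore flow implied by the tests above.
async function run(): Promise<void> {
    try {
        const primaryKey = core.getInput("key", { required: true });
        core.saveState("CACHE_KEY", primaryKey); // asserted by the tests

        // Multi-line inputs, normalized the same way getInputAsArray does.
        const toArray = (value: string): string[] =>
            value.split("\n").map(s => s.trim()).filter(x => x !== "");
        const cachePaths = toArray(core.getInput("path", { required: true }));
        const restoreKeys = toArray(core.getInput("restore-keys"));

        try {
            // Download and extraction now live in the @actions/cache package;
            // resolves to the matched key, or undefined when nothing matched.
            const cacheKey = await cache.restoreCache(
                cachePaths,
                primaryKey,
                restoreKeys
            );
            if (!cacheKey) {
                core.info(`Cache not found for input keys: ${primaryKey}`);
                return;
            }
            core.setOutput("cache-hit", (cacheKey === primaryKey).toString());
            core.info(`Cache restored from key: ${cacheKey}`);
        } catch (error) {
            // Validation problems fail the step ("Key Validation Error: ...");
            // anything else, such as a server error, is only a warning.
            if (error instanceof cache.ValidationError) {
                throw error;
            }
            core.info(`[warning]${(error as Error).message}`);
        }
    } catch (error) {
        core.setFailed((error as Error).message);
    }
}

run();
```
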
@@ -1,22 +1,13 @@
+import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 import * as path from "path";

-import * as cacheHttpClient from "../src/cacheHttpClient";
-import {
-    CacheFilename,
-    CompressionMethod,
-    Events,
-    Inputs
-} from "../src/constants";
-import { ArtifactCacheEntry } from "../src/contracts";
+import { Events, Inputs, RefKey } from "../src/constants";
 import run from "../src/save";
-import * as tar from "../src/tar";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

 jest.mock("@actions/core");
-jest.mock("../src/cacheHttpClient");
-jest.mock("../src/tar");
+jest.mock("@actions/cache");
 jest.mock("../src/utils/actionUtils");

 beforeAll(() => {
@@ -28,6 +19,14 @@ beforeAll(() => {
         return jest.requireActual("../src/utils/actionUtils").getCacheState();
     });

+    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
+        (name, options) => {
+            return jest
+                .requireActual("../src/utils/actionUtils")
+                .getInputAsArray(name, options);
+        }
+    );
+
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             return jest
@@ -40,35 +39,17 @@ beforeAll(() => {
             const actualUtils = jest.requireActual("../src/utils/actionUtils");
             return actualUtils.isValidEvent();
         });

-    jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getSupportedEvents();
-    });
-
-    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
-        async filePaths => {
-            return filePaths.map(x => path.resolve(x));
-        }
-    );
-
-    jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
-        return Promise.resolve("/foo/bar");
-    });
-
-    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getCacheFileName(cm);
-    });
 });

 beforeEach(() => {
     process.env[Events.Key] = Events.Push;
+    process.env[RefKey] = "refs/heads/feature-branch";
 });

 afterEach(() => {
     testUtils.clearInputs();
     delete process.env[Events.Key];
+    delete process.env[RefKey];
 });

 test("save with invalid event outputs warning", async () => {
@@ -76,9 +57,10 @@ test("save with invalid event outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     const invalidEvent = "commit_comment";
     process.env[Events.Key] = invalidEvent;
+    delete process.env[RefKey];
     await run();
     expect(logWarningMock).toHaveBeenCalledWith(
-        `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
+        `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
@@ -87,25 +69,21 @@ test("save with no primary key in state outputs warning", async () => {
     const logWarningMock = jest.spyOn(actionUtils, "logWarning");
     const failedMock = jest.spyOn(core, "setFailed");

-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
-        scope: "refs/heads/master",
-        creationTime: "2019-11-13T19:18:02+00:00",
-        archiveLocation: "www.actionscache.test/download"
-    };
-
+    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
     jest.spyOn(core, "getState")
         // Cache Entry State
         .mockImplementationOnce(() => {
-            return JSON.stringify(cacheEntry);
+            return savedCacheKey;
         })
         // Cache Key State
         .mockImplementationOnce(() => {
             return "";
         });
+    const saveCacheMock = jest.spyOn(cache, "saveCache");

     await run();

+    expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(logWarningMock).toHaveBeenCalledWith(
         `Error retrieving key from state.`
     );
@@ -118,33 +96,25 @@ test("save with exact match returns early", async () => {
     const failedMock = jest.spyOn(core, "setFailed");

     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: primaryKey,
-        scope: "refs/heads/master",
-        creationTime: "2019-11-13T19:18:02+00:00",
-        archiveLocation: "www.actionscache.test/download"
-    };
+    const savedCacheKey = primaryKey;

     jest.spyOn(core, "getState")
         // Cache Entry State
         .mockImplementationOnce(() => {
-            return JSON.stringify(cacheEntry);
+            return savedCacheKey;
         })
         // Cache Key State
         .mockImplementationOnce(() => {
             return primaryKey;
         });

-    const createTarMock = jest.spyOn(tar, "createTar");
+    const saveCacheMock = jest.spyOn(cache, "saveCache");

     await run();

     expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(infoMock).toHaveBeenCalledWith(
         `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
     );

-    expect(createTarMock).toHaveBeenCalledTimes(0);
-
     expect(failedMock).toHaveBeenCalledTimes(0);
 });

@@ -153,25 +123,22 @@ test("save with missing input outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");

     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "Linux-node-",
-        scope: "refs/heads/master",
-        creationTime: "2019-11-13T19:18:02+00:00",
-        archiveLocation: "www.actionscache.test/download"
-    };
+    const savedCacheKey = "Linux-node-";

     jest.spyOn(core, "getState")
         // Cache Entry State
         .mockImplementationOnce(() => {
-            return JSON.stringify(cacheEntry);
+            return savedCacheKey;
         })
         // Cache Key State
         .mockImplementationOnce(() => {
             return primaryKey;
         });
+    const saveCacheMock = jest.spyOn(cache, "saveCache");

     await run();

+    expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(logWarningMock).toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
@@ -184,17 +151,12 @@ test("save with large cache outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");

     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "Linux-node-",
-        scope: "refs/heads/master",
-        creationTime: "2019-11-13T19:18:02+00:00",
-        archiveLocation: "www.actionscache.test/download"
-    };
+    const savedCacheKey = "Linux-node-";

     jest.spyOn(core, "getState")
         // Cache Entry State
         .mockImplementationOnce(() => {
-            return JSON.stringify(cacheEntry);
+            return savedCacheKey;
         })
         // Cache Key State
         .mockImplementationOnce(() => {
@@ -202,36 +164,26 @@ test("save with large cache outputs warning", async () => {
         });

     const inputPath = "node_modules";
-    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);

-    const createTarMock = jest.spyOn(tar, "createTar");
-
-    const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
-    jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
-        return cacheSize;
-    });
-    const compression = CompressionMethod.Gzip;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
+    const saveCacheMock = jest
+        .spyOn(cache, "saveCache")
+        .mockImplementationOnce(() => {
+            throw new Error(
+                "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
+            );
+        });

     await run();

-    const archiveFolder = "/foo/bar";
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
+    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);

-    expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(
-        archiveFolder,
-        cachePaths,
-        compression
-    );
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
         "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

@@ -240,17 +192,12 @@ test("save with reserve cache failure outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");

     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "Linux-node-",
-        scope: "refs/heads/master",
-        creationTime: "2019-11-13T19:18:02+00:00",
-        archiveLocation: "www.actionscache.test/download"
-    };
+    const savedCacheKey = "Linux-node-";

     jest.spyOn(core, "getState")
         // Cache Entry State
         .mockImplementationOnce(() => {
-            return JSON.stringify(cacheEntry);
+            return savedCacheKey;
         })
         // Cache Key State
         .mockImplementationOnce(() => {
@@ -260,35 +207,26 @@ test("save with reserve cache failure outputs warning", async () => {
     const inputPath = "node_modules";
     testUtils.setInput(Inputs.Path, inputPath);

-    const reserveCacheMock = jest
-        .spyOn(cacheHttpClient, "reserveCache")
+    const saveCacheMock = jest
+        .spyOn(cache, "saveCache")
         .mockImplementationOnce(() => {
-            return Promise.resolve(-1);
+            const actualCache = jest.requireActual("@actions/cache");
+            const error = new actualCache.ReserveCacheError(
+                `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
+            );
+            throw error;
         });

-    const createTarMock = jest.spyOn(tar, "createTar");
-    const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();

-    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
-        compressionMethod: compression
-    });
+    expect(saveCacheMock).toHaveBeenCalledTimes(1);
+    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);

     expect(infoMock).toHaveBeenCalledWith(
         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
     );

-    expect(createTarMock).toHaveBeenCalledTimes(0);
-    expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(logWarningMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });

@@ -296,17 +234,12 @@ test("save with server error outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");

     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
-    const cacheEntry: ArtifactCacheEntry = {
-        cacheKey: "Linux-node-",
-        scope: "refs/heads/master",
-        creationTime: "2019-11-13T19:18:02+00:00",
-        archiveLocation: "www.actionscache.test/download"
-    };
+    const savedCacheKey = "Linux-node-";

     jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
return savedCacheKey;
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
@ -314,70 +247,35 @@ test("save with server error outputs warning", async () => {
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const cacheId = 4;
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(cacheId);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const saveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "saveCache")
|
||||
.spyOn(cache, "saveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
throw new Error("HTTP Error Occurred");
|
||||
});
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
const archiveFolder = "/foo/bar";
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
|
||||
|
||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test("save with valid inputs uploads a cache", async () => {
|
||||
const failedMock = jest.spyOn(core, "setFailed");
|
||||
|
||||
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
|
||||
const cacheEntry: ArtifactCacheEntry = {
|
||||
cacheKey: "Linux-node-",
|
||||
scope: "refs/heads/master",
|
||||
creationTime: "2019-11-13T19:18:02+00:00",
|
||||
archiveLocation: "www.actionscache.test/download"
|
||||
};
|
||||
const savedCacheKey = "Linux-node-";
|
||||
|
||||
jest.spyOn(core, "getState")
|
||||
// Cache Entry State
|
||||
.mockImplementationOnce(() => {
|
||||
return JSON.stringify(cacheEntry);
|
||||
return savedCacheKey;
|
||||
})
|
||||
// Cache Key State
|
||||
.mockImplementationOnce(() => {
|
||||
@ -385,44 +283,19 @@ test("save with valid inputs uploads a cache", async () => {
|
||||
});
|
||||
|
||||
const inputPath = "node_modules";
|
||||
const cachePaths = [path.resolve(inputPath)];
|
||||
testUtils.setInput(Inputs.Path, inputPath);
|
||||
|
||||
const cacheId = 4;
|
||||
const reserveCacheMock = jest
|
||||
.spyOn(cacheHttpClient, "reserveCache")
|
||||
const saveCacheMock = jest
|
||||
.spyOn(cache, "saveCache")
|
||||
.mockImplementationOnce(() => {
|
||||
return Promise.resolve(cacheId);
|
||||
});
|
||||
|
||||
const createTarMock = jest.spyOn(tar, "createTar");
|
||||
|
||||
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
|
||||
const compression = CompressionMethod.Zstd;
|
||||
const getCompressionMock = jest
|
||||
.spyOn(actionUtils, "getCompressionMethod")
|
||||
.mockReturnValue(Promise.resolve(compression));
|
||||
|
||||
await run();
|
||||
|
||||
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
|
||||
compressionMethod: compression
|
||||
});
|
||||
|
||||
const archiveFolder = "/foo/bar";
|
||||
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
|
||||
|
||||
expect(createTarMock).toHaveBeenCalledTimes(1);
|
||||
expect(createTarMock).toHaveBeenCalledWith(
|
||||
archiveFolder,
|
||||
cachePaths,
|
||||
compression
|
||||
);
|
||||
|
||||
expect(saveCacheMock).toHaveBeenCalledTimes(1);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
|
||||
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
|
||||
|
||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
@@ -1,204 +0,0 @@
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";

import { CacheFilename, CompressionMethod } from "../src/constants";
import * as tar from "../src/tar";
import * as utils from "../src/utils/actionUtils";

import fs = require("fs");

jest.mock("@actions/exec");
jest.mock("@actions/io");

const IS_WINDOWS = process.platform === "win32";

function getTempDir(): string {
    return path.join(__dirname, "_temp", "tar");
}

beforeAll(async () => {
    jest.spyOn(io, "which").mockImplementation(tool => {
        return Promise.resolve(tool);
    });

    process.env["GITHUB_WORKSPACE"] = process.cwd();
    await jest.requireActual("@actions/io").rmRF(getTempDir());
});

afterAll(async () => {
    delete process.env["GITHUB_WORKSPACE"];
    await jest.requireActual("@actions/io").rmRF(getTempDir());
});

test("zstd extract tar", async () => {
    const mkdirMock = jest.spyOn(io, "mkdirP");
    const execMock = jest.spyOn(exec, "exec");

    const archivePath = IS_WINDOWS
        ? `${process.env["windir"]}\\fakepath\\cache.tar`
        : "cache.tar";
    const workspace = process.env["GITHUB_WORKSPACE"];

    await tar.extractTar(archivePath, CompressionMethod.Zstd);

    expect(mkdirMock).toHaveBeenCalledWith(workspace);
    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "--use-compress-program",
            "zstd -d --long=30",
            "-xf",
            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
        ],
        { cwd: undefined }
    );
});

test("gzip extract tar", async () => {
    const mkdirMock = jest.spyOn(io, "mkdirP");
    const execMock = jest.spyOn(exec, "exec");
    const archivePath = IS_WINDOWS
        ? `${process.env["windir"]}\\fakepath\\cache.tar`
        : "cache.tar";
    const workspace = process.env["GITHUB_WORKSPACE"];

    await tar.extractTar(archivePath, CompressionMethod.Gzip);

    expect(mkdirMock).toHaveBeenCalledWith(workspace);
    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "-z",
            "-xf",
            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
        ],
        { cwd: undefined }
    );
});

test("gzip extract GNU tar on windows", async () => {
    if (IS_WINDOWS) {
        jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);

        const isGnuMock = jest
            .spyOn(utils, "useGnuTar")
            .mockReturnValue(Promise.resolve(true));
        const execMock = jest.spyOn(exec, "exec");
        const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
        const workspace = process.env["GITHUB_WORKSPACE"];

        await tar.extractTar(archivePath, CompressionMethod.Gzip);

        expect(isGnuMock).toHaveBeenCalledTimes(1);
        expect(execMock).toHaveBeenCalledTimes(1);
        expect(execMock).toHaveBeenCalledWith(
            `"tar"`,
            [
                "-z",
                "-xf",
                archivePath.replace(/\\/g, "/"),
                "-P",
                "-C",
                workspace?.replace(/\\/g, "/"),
                "--force-local"
            ],
            { cwd: undefined }
        );
    }
});

test("zstd create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Zstd
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "--use-compress-program",
            "zstd -T0 --long=30",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Zstd.replace(/\\/g, "/")
                : CacheFilename.Zstd,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});

test("gzip create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Gzip
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "-z",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Gzip.replace(/\\/g, "/")
                : CacheFilename.Gzip,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});
@@ -3,7 +3,7 @@ description: 'Cache artifacts like dependencies and build outputs to improve wor
author: 'GitHub'
inputs:
  path:
    description: 'A directory to store and save the cache'
    description: 'A list of files, directories, and wildcard patterns to cache and restore'
    required: true
  key:
    description: 'An explicit key for restoring and saving the cache'
44773 dist/restore/index.js (vendored)
File diff suppressed because one or more lines are too long

44663 dist/save/index.js (vendored)
File diff suppressed because one or more lines are too long

172 examples.md
@@ -2,6 +2,7 @@

- [Examples](#examples)
  - [C# - NuGet](#c---nuget)
  - [D - DUB](#d---dub)
  - [Elixir - Mix](#elixir---mix)
  - [Go - Modules](#go---modules)
  - [Haskell - Cabal](#haskell---cabal)

@@ -13,6 +14,7 @@
  - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config)
  - [Node - Lerna](#node---lerna)
  - [Node - Yarn](#node---yarn)
  - [Node - Yarn 2](#node---yarn-2)
  - [OCaml/Reason - esy](#ocamlreason---esy)
  - [PHP - Composer](#php---composer)
  - [Python - pip](#python---pip)

@@ -34,7 +36,7 @@
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ~/.nuget/packages
    key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}

@@ -43,13 +45,25 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
```

Depending on the environment, huge packages might be pre-installed in the global cache folder.
If you do not want to include them, consider moving the cache folder as shown below.
With `actions/cache@v2` you can now exclude unwanted packages with an [exclude pattern](https://github.com/actions/toolkit/tree/main/packages/glob#exclude-patterns):
```yaml
- uses: actions/cache@v2
  with:
    path: |
      ~/.nuget/packages
      !~/.nuget/packages/unwanted
    key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
    restore-keys: |
      ${{ runner.os }}-nuget-
```

Or you could move the cache folder as shown below.
>Note: This workflow does not work for projects that require files to be placed in the user profile package folder.
```yaml
env:
  NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
  - uses: actions/cache@v1
  - uses: actions/cache@v2
    with:
      path: ${{ github.workspace }}/.nuget/packages
      key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}

@@ -57,9 +71,33 @@ steps:
        ${{ runner.os }}-nuget-
```

## D - DUB

### POSIX

```yaml
- uses: actions/cache@v2
  with:
    path: ~/.dub
    key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.json') }}
    restore-keys: |
      ${{ runner.os }}-dub-
```

### Windows

```yaml
- uses: actions/cache@v2
  with:
    path: ~\AppData\Local\dub
    key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.json') }}
    restore-keys: |
      ${{ runner.os }}-dub-
```

## Elixir - Mix
```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: deps
    key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}

@@ -70,7 +108,7 @@ steps:
## Go - Modules

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ~/go/pkg/mod
    key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}

@@ -83,29 +121,24 @@ steps:
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.

```yaml
- uses: actions/cache@v1
  name: Cache ~/.cabal/packages
- name: Cache ~/.cabal/packages, ~/.cabal/store and dist-newstyle
  uses: actions/cache@v2
  with:
    path: ~/.cabal/packages
    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
  name: Cache ~/.cabal/store
  with:
    path: ~/.cabal/store
    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
  name: Cache dist-newstyle
  with:
    path: dist-newstyle
    key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
    path: |
      ~/.cabal/packages
      ~/.cabal/store
      dist-newstyle
    key: ${{ runner.os }}-${{ matrix.ghc }}
```

## Java - Gradle

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ~/.gradle/caches
    path: |
      ~/.gradle/caches
      ~/.gradle/wrapper
    key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
    restore-keys: |
      ${{ runner.os }}-gradle-

@@ -114,7 +147,8 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
## Java - Maven

```yaml
- uses: actions/cache@v1
- name: Cache local Maven repository
  uses: actions/cache@v2
  with:
    path: ~/.m2/repository
    key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}

@@ -126,12 +160,14 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba

For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` on Windows. See https://docs.npmjs.com/cli/cache#cache

If using `npm config` to retrieve the cache directory, ensure you run [actions/setup-node](https://github.com/actions/setup-node) first to ensure your `npm` version is correct.

>Note: It is not recommended to cache `node_modules`, as it can break across Node versions and won't work with `npm ci`

### macOS and Ubuntu

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ~/.npm
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

@@ -146,7 +182,7 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
  id: npm-cache
  run: |
    echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ${{ steps.npm-cache.outputs.dir }}
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

@@ -161,7 +197,7 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
  id: npm-cache
  run: |
    echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ${{ steps.npm-cache.outputs.dir }}
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}

@@ -171,10 +207,9 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o

## Node - Lerna

>Note this example uses the new multi-paths feature and is only available at `master`
```yaml
- name: restore lerna
  uses: actions/cache@master
  uses: actions/cache@v2
  with:
    path: |
      node_modules

@@ -190,7 +225,25 @@ The yarn cache directory will depend on your operating system and version of `ya
  id: yarn-cache-dir-path
  run: echo "::set-output name=dir::$(yarn cache dir)"

- uses: actions/cache@v1
- uses: actions/cache@v2
  id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
  with:
    path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
    key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
    restore-keys: |
      ${{ runner.os }}-yarn-
```

## Node - Yarn 2
The yarn 2 cache directory will depend on your config. See https://yarnpkg.com/configuration/yarnrc#cacheFolder for more info.

```yaml
- name: Get yarn cache directory path
  id: yarn-cache-dir-path
  run: echo "::set-output name=dir::$(yarn config get cacheFolder)"

- uses: actions/cache@v2
  id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
  with:
    path: ${{ steps.yarn-cache-dir-path.outputs.dir }}

@@ -204,7 +257,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
```yaml
- name: Restore Cache
  id: restore-cache
  uses: actions/cache@v1
  uses: actions/cache@v2
  with:
    path: _export
    key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}

@@ -234,7 +287,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
  id: composer-cache
  run: |
    echo "::set-output name=dir::$(composer config cache-files-dir)"
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ${{ steps.composer-cache.outputs.dir }}
    key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}

@@ -253,7 +306,7 @@ Locations:

### Simple example
```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ~/.cache/pip
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}

@@ -266,7 +319,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  if: startsWith(runner.os, 'Linux')
  with:
    path: ~/.cache/pip

@@ -274,7 +327,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-pip-

- uses: actions/cache@v1
- uses: actions/cache@v2
  if: startsWith(runner.os, 'macOS')
  with:
    path: ~/Library/Caches/pip

@@ -282,7 +335,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-pip-

- uses: actions/cache@v1
- uses: actions/cache@v2
  if: startsWith(runner.os, 'Windows')
  with:
    path: ~\AppData\Local\pip\Cache

@@ -301,7 +354,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
    echo "::set-output name=dir::$(pip cache dir)"

- name: pip cache
  uses: actions/cache@v1
  uses: actions/cache@v2
  with:
    path: ${{ steps.pip-cache.outputs.dir }}
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}

@@ -318,7 +371,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
  run: |
    python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"

- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ${{ steps.pip-cache.outputs.dir }}
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}

@@ -337,7 +390,7 @@ Locations:

### Simple example
```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ~/.local/share/renv
    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}

@@ -350,7 +403,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  if: startsWith(runner.os, 'Linux')
  with:
    path: ~/.local/share/renv

@@ -358,7 +411,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-renv-

- uses: actions/cache@v1
- uses: actions/cache@v2
  if: startsWith(runner.os, 'macOS')
  with:
    path: ~/Library/Application Support/renv

@@ -366,7 +419,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-renv-

- uses: actions/cache@v1
- uses: actions/cache@v2
  if: startsWith(runner.os, 'Windows')
  with:
    path: ~\AppData\Local\renv

@@ -378,7 +431,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
## Ruby - Bundler

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: vendor/bundle
    key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}

@@ -397,42 +450,31 @@ When dependencies are installed later in the workflow, we must specify the same
## Rust - Cargo

```yaml
- name: Cache cargo registry
  uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: ~/.cargo/registry
    key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
  uses: actions/cache@v1
  with:
    path: ~/.cargo/git
    key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
  uses: actions/cache@v1
  with:
    path: target
    key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
    path: |
      ~/.cargo/registry
      ~/.cargo/git
      target
    key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
```

## Scala - SBT

```yaml
- name: Cache SBT ivy cache
  uses: actions/cache@v1
  with:
    path: ~/.ivy2/cache
    key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
  uses: actions/cache@v1
  uses: actions/cache@v2
  with:
    path: ~/.sbt
    path: |
      ~/.ivy2/cache
      ~/.sbt
    key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```

## Swift, Objective-C - Carthage

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: Carthage
    key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }}

@@ -443,7 +485,7 @@ When dependencies are installed later in the workflow, we must specify the same
## Swift, Objective-C - CocoaPods

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: Pods
    key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}

@@ -454,7 +496,7 @@ When dependencies are installed later in the workflow, we must specify the same
## Swift - Swift Package Manager

```yaml
- uses: actions/cache@v1
- uses: actions/cache@v2
  with:
    path: .build
    key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
6453 package-lock.json (generated)
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
  "name": "cache",
  "version": "1.1.2",
  "version": "2.1.0",
  "private": true,
  "description": "Cache dependencies and build outputs",
  "main": "dist/restore/index.js",

@@ -25,16 +25,13 @@
  "dependencies": {
    "@actions/core": "^1.2.0",
    "@actions/exec": "^1.0.1",
    "@actions/glob": "^0.1.0",
    "@actions/http-client": "^1.0.8",
    "@actions/io": "^1.0.1",
    "uuid": "^3.3.3"
    "@actions/cache": "^1.0.1"
  },
  "devDependencies": {
    "@types/jest": "^24.0.13",
    "@types/nock": "^11.1.0",
    "@types/node": "^12.0.4",
    "@types/uuid": "^3.4.5",
    "@typescript-eslint/eslint-plugin": "^2.7.0",
    "@typescript-eslint/parser": "^2.7.0",
    "@zeit/ncc": "^0.20.5",
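The dependency swap above is the heart of this release: instead of talking to the cache HTTP API directly, the action now delegates to the `@actions/cache` package. As a minimal sketch of the two calls the action makes (the key and path values here are hypothetical, chosen only for illustration):

```typescript
import * as cache from "@actions/cache";

async function example(): Promise<void> {
    const paths = ["node_modules"];
    const primaryKey = "Linux-node-abc123"; // hypothetical key
    const restoreKeys = ["Linux-node-"];

    // Restore: returns the matched key, or undefined on a cache miss.
    const matchedKey = await cache.restoreCache(paths, primaryKey, restoreKeys);

    // Save: archives the paths and uploads them under the primary key.
    if (!matchedKey) {
        await cache.saveCache(paths, primaryKey);
    }
}
```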
@@ -1,352 +0,0 @@
import * as core from "@actions/core";
import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import {
    IHttpClientResponse,
    IRequestOptions,
    ITypedResponse
} from "@actions/http-client/interfaces";
import * as crypto from "crypto";
import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";

import { CompressionMethod, Inputs, SocketTimeout } from "./constants";
import {
    ArtifactCacheEntry,
    CacheOptions,
    CommitCacheRequest,
    ReserveCacheRequest,
    ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";

const versionSalt = "1.0";

function isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    return statusCode >= 200 && statusCode < 300;
}

function isRetryableStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    const retryableStatusCodes = [
        HttpCodes.BadGateway,
        HttpCodes.ServiceUnavailable,
        HttpCodes.GatewayTimeout
    ];
    return retryableStatusCodes.includes(statusCode);
}

function getCacheApiUrl(resource: string): string {
    // Ideally we just use ACTIONS_CACHE_URL
    const baseUrl: string = (
        process.env["ACTIONS_CACHE_URL"] ||
        process.env["ACTIONS_RUNTIME_URL"] ||
        ""
    ).replace("pipelines", "artifactcache");
    if (!baseUrl) {
        throw new Error(
            "Cache Service Url not found, unable to restore cache."
        );
    }

    const url = `${baseUrl}_apis/artifactcache/${resource}`;
    core.debug(`Resource Url: ${url}`);
    return url;
}

function createAcceptHeader(type: string, apiVersion: string): string {
    return `${type};api-version=${apiVersion}`;
}

function getRequestOptions(): IRequestOptions {
    const requestOptions: IRequestOptions = {
        headers: {
            Accept: createAcceptHeader("application/json", "6.0-preview.1")
        }
    };

    return requestOptions;
}

function createHttpClient(): HttpClient {
    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
    const bearerCredentialHandler = new BearerCredentialHandler(token);

    return new HttpClient(
        "actions/cache",
        [bearerCredentialHandler],
        getRequestOptions()
    );
}

export function getCacheVersion(compressionMethod?: CompressionMethod): string {
    const components = [core.getInput(Inputs.Path, { required: true })].concat(
        compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : []
    );

    // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);

    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}

export async function getCacheEntry(
    keys: string[],
    options?: CacheOptions
): Promise<ArtifactCacheEntry | null> {
    const httpClient = createHttpClient();
    const version = getCacheVersion(options?.compressionMethod);
    const resource = `cache?keys=${encodeURIComponent(
        keys.join(",")
    )}&version=${version}`;

    const response = await httpClient.getJson<ArtifactCacheEntry>(
        getCacheApiUrl(resource)
    );
    if (response.statusCode === 204) {
        return null;
    }
    if (!isSuccessStatusCode(response.statusCode)) {
        throw new Error(`Cache service responded with ${response.statusCode}`);
    }

    const cacheResult = response.result;
    const cacheDownloadUrl = cacheResult?.archiveLocation;
    if (!cacheDownloadUrl) {
        throw new Error("Cache not found.");
    }
    core.setSecret(cacheDownloadUrl);
    core.debug(`Cache Result:`);
    core.debug(JSON.stringify(cacheResult));

    return cacheResult;
}

async function pipeResponseToStream(
    response: IHttpClientResponse,
    output: NodeJS.WritableStream
): Promise<void> {
    const pipeline = util.promisify(stream.pipeline);
    await pipeline(response.message, output);
}

export async function downloadCache(
    archiveLocation: string,
    archivePath: string
): Promise<void> {
    const stream = fs.createWriteStream(archivePath);
    const httpClient = new HttpClient("actions/cache");
    const downloadResponse = await httpClient.get(archiveLocation);

    // Abort download if no traffic received over the socket.
    downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
        downloadResponse.message.destroy();
        core.debug(
            `Aborting download, socket timed out after ${SocketTimeout} ms`
        );
    });

    await pipeResponseToStream(downloadResponse, stream);

    // Validate download size.
    const contentLengthHeader =
        downloadResponse.message.headers["content-length"];

    if (contentLengthHeader) {
        const expectedLength = parseInt(contentLengthHeader);
        const actualLength = utils.getArchiveFileSize(archivePath);

        if (actualLength != expectedLength) {
            throw new Error(
                `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
            );
        }
    } else {
        core.debug("Unable to validate download, no Content-Length header");
    }
}

// Reserve Cache
export async function reserveCache(
    key: string,
    options?: CacheOptions
): Promise<number> {
    const httpClient = createHttpClient();
    const version = getCacheVersion(options?.compressionMethod);

    const reserveCacheRequest: ReserveCacheRequest = {
        key,
        version
    };
    const response = await httpClient.postJson<ReserveCacheResponse>(
        getCacheApiUrl("caches"),
        reserveCacheRequest
    );
    return response?.result?.cacheId ?? -1;
}

function getContentRange(start: number, end: number): string {
    // Format: `bytes start-end/filesize
    // start and end are inclusive
    // filesize can be *
    // For a 200 byte chunk starting at byte 0:
    // Content-Range: bytes 0-199/*
    return `bytes ${start}-${end}/*`;
}

async function uploadChunk(
    httpClient: HttpClient,
    resourceUrl: string,
    data: NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<void> {
    core.debug(
        `Uploading chunk of size ${end -
            start +
            1} bytes at offset ${start} with content range: ${getContentRange(
            start,
            end
        )}`
    );
    const additionalHeaders = {
        "Content-Type": "application/octet-stream",
        "Content-Range": getContentRange(start, end)
    };

    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
        return await httpClient.sendStream(
            "PATCH",
            resourceUrl,
            data,
            additionalHeaders
        );
    };

    const response = await uploadChunkRequest();
    if (isSuccessStatusCode(response.message.statusCode)) {
        return;
    }

    if (isRetryableStatusCode(response.message.statusCode)) {
        core.debug(
            `Received ${response.message.statusCode}, retrying chunk at offset ${start}.`
        );
        const retryResponse = await uploadChunkRequest();
        if (isSuccessStatusCode(retryResponse.message.statusCode)) {
            return;
        }
    }

    throw new Error(
        `Cache service responded with ${response.message.statusCode} during chunk upload.`
    );
}
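// A chunk is retried at most once, and only for 502/503/504 responses
// (see isRetryableStatusCode above); any other failure status becomes an
// error thrown out of saveCache.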

function parseEnvNumber(key: string): number | undefined {
    const value = Number(process.env[key]);
    if (Number.isNaN(value) || value < 0) {
        return undefined;
    }
    return value;
}

async function uploadFile(
    httpClient: HttpClient,
    cacheId: number,
    archivePath: string
): Promise<void> {
    // Upload Chunks
    const fileSize = fs.statSync(archivePath).size;
    const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
    const fd = fs.openSync(archivePath, "r");

    const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
    const MAX_CHUNK_SIZE =
        parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);

    const parallelUploads = [...new Array(concurrency).keys()];
    core.debug("Awaiting all uploads");
    let offset = 0;

    try {
        await Promise.all(
            parallelUploads.map(async () => {
                while (offset < fileSize) {
                    const chunkSize = Math.min(
                        fileSize - offset,
                        MAX_CHUNK_SIZE
                    );
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    offset += MAX_CHUNK_SIZE;
                    const chunk = fs.createReadStream(archivePath, {
                        fd,
                        start,
                        end,
                        autoClose: false
                    });

                    await uploadChunk(
                        httpClient,
                        resourceUrl,
                        chunk,
                        start,
                        end
                    );
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
    return;
}
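// The parallel workers above deliberately share a single `offset` variable:
// Node.js runs this callback on one thread and there is no await between
// reading `offset` and the `offset += MAX_CHUNK_SIZE` increment, so each
// worker claims a distinct chunk and ranges never overlap.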

async function commitCache(
    httpClient: HttpClient,
    cacheId: number,
    filesize: number
): Promise<ITypedResponse<null>> {
    const commitCacheRequest: CommitCacheRequest = { size: filesize };
    return await httpClient.postJson<null>(
        getCacheApiUrl(`caches/${cacheId.toString()}`),
        commitCacheRequest
    );
}

export async function saveCache(
    cacheId: number,
    archivePath: string
): Promise<void> {
    const httpClient = createHttpClient();

    core.debug("Upload cache");
    await uploadFile(httpClient, cacheId, archivePath);

    // Commit Cache
    core.debug("Committing cache");
    const cacheSize = utils.getArchiveFileSize(archivePath);
    const commitCacheResponse = await commitCache(
        httpClient,
        cacheId,
        cacheSize
    );
    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
        throw new Error(
            `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
        );
    }

    core.info("Cache saved successfully");
}
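To make the cache versioning in `getCacheVersion` above concrete, here is a minimal standalone sketch of the same hashing scheme (the `computeCacheVersion` helper and its inputs are hypothetical, for illustration only): the path input, an optional zstd marker, and the salt are joined with `|` and hashed, so changing any one of them yields a different version and therefore a different cache entry.

```typescript
import * as crypto from "crypto";

// Hypothetical standalone mirror of getCacheVersion above.
function computeCacheVersion(pathInput: string, useZstd: boolean): string {
    const components = [pathInput];
    if (useZstd) {
        // zstd archives are unreadable to gzip-only runners, so the
        // compression method is folded into the version.
        components.push("zstd");
    }
    components.push("1.0"); // versionSalt: bumped on breaking changes
    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}

// Same path, different compression => different cache versions:
console.log(computeCacheVersion("node_modules", false));
console.log(computeCacheVersion("node_modules", true));
```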
@@ -9,8 +9,8 @@ export enum Outputs {
}

export enum State {
    CacheKey = "CACHE_KEY",
    CacheResult = "CACHE_RESULT"
    CachePrimaryKey = "CACHE_KEY",
    CacheMatchedKey = "CACHE_RESULT"
}

export enum Events {

@@ -19,17 +19,4 @@ export enum Events {
    PullRequest = "pull_request"
}

export enum CacheFilename {
    Gzip = "cache.tgz",
    Zstd = "cache.tzst"
}

export enum CompressionMethod {
    Gzip = "gzip",
    Zstd = "zstd"
}

// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const SocketTimeout = 5000;
export const RefKey = "GITHUB_REF";
25 src/contracts.d.ts (vendored)
@@ -1,25 +0,0 @@
import { CompressionMethod } from "./constants";

export interface ArtifactCacheEntry {
    cacheKey?: string;
    scope?: string;
    creationTime?: string;
    archiveLocation?: string;
}

export interface CommitCacheRequest {
    size: number;
}

export interface ReserveCacheRequest {
    key: string;
    version?: string;
}

export interface ReserveCacheResponse {
    cacheId: number;
}

export interface CacheOptions {
    compressionMethod?: CompressionMethod;
}
114 src/restore.ts
@@ -1,9 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as path from "path";

import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
import * as utils from "./utils/actionUtils";

async function run(): Promise<void> {

@@ -13,103 +11,49 @@ async function run(): Promise<void> {
        utils.logWarning(
            `Event Validation Error: The event type ${
                process.env[Events.Key]
            } is not supported. Only ${utils
                .getSupportedEvents()
                .join(", ")} events are supported at this time.`
            } is not supported because it's not tied to a branch or tag ref.`
        );
        return;
    }

    const primaryKey = core.getInput(Inputs.Key, { required: true });
    core.saveState(State.CacheKey, primaryKey);
    core.saveState(State.CachePrimaryKey, primaryKey);

    const restoreKeys = core
        .getInput(Inputs.RestoreKeys)
        .split("\n")
        .filter(x => x !== "");
    const keys = [primaryKey, ...restoreKeys];

    core.debug("Resolved Keys:");
    core.debug(JSON.stringify(keys));

    if (keys.length > 10) {
        core.setFailed(
            `Key Validation Error: Keys are limited to a maximum of 10.`
        );
        return;
    }
    for (const key of keys) {
        if (key.length > 512) {
            core.setFailed(
                `Key Validation Error: ${key} cannot be larger than 512 characters.`
            );
            return;
        }
        const regex = /^[^,]*$/;
        if (!regex.test(key)) {
            core.setFailed(
                `Key Validation Error: ${key} cannot contain commas.`
            );
            return;
        }
    }

    const compressionMethod = await utils.getCompressionMethod();
    const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
    const cachePaths = utils.getInputAsArray(Inputs.Path, {
        required: true
    });

    try {
        const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
            compressionMethod: compressionMethod
        });
        if (!cacheEntry?.archiveLocation) {
            core.info(`Cache not found for input keys: ${keys.join(", ")}`);
        const cacheKey = await cache.restoreCache(
            cachePaths,
            primaryKey,
            restoreKeys
        );
        if (!cacheKey) {
            core.info(
                `Cache not found for input keys: ${[
                    primaryKey,
                    ...restoreKeys
                ].join(", ")}`
            );
            return;
        }

        const archivePath = path.join(
            await utils.createTempDirectory(),
            utils.getCacheFileName(compressionMethod)
        );
        core.debug(`Archive Path: ${archivePath}`);
        // Store the matched cache key
        utils.setCacheState(cacheKey);

        // Store the cache result
        utils.setCacheState(cacheEntry);

        try {
            // Download the cache from the cache entry
            await cacheHttpClient.downloadCache(
                cacheEntry.archiveLocation,
                archivePath
            );

            const archiveFileSize = utils.getArchiveFileSize(archivePath);
            core.info(
                `Cache Size: ~${Math.round(
                    archiveFileSize / (1024 * 1024)
                )} MB (${archiveFileSize} B)`
            );

            await extractTar(archivePath, compressionMethod);
        } finally {
            // Try to delete the archive to save space
            try {
                await utils.unlinkFile(archivePath);
            } catch (error) {
                core.debug(`Failed to delete archive: ${error}`);
            }
        }

        const isExactKeyMatch = utils.isExactKeyMatch(
            primaryKey,
            cacheEntry
        );
        const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
        utils.setCacheHitOutput(isExactKeyMatch);

        core.info(
            `Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`
        );
        core.info(`Cache restored from key: ${cacheKey}`);
    } catch (error) {
        utils.logWarning(error.message);
        utils.setCacheHitOutput(false);
        if (error.name === cache.ValidationError.name) {
            throw error;
        } else {
            utils.logWarning(error.message);
            utils.setCacheHitOutput(false);
        }
    }
} catch (error) {
    core.setFailed(error.message);
68 src/save.ts
@@ -1,9 +1,7 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as path from "path";

import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
import * as utils from "./utils/actionUtils";

async function run(): Promise<void> {

@@ -12,9 +10,7 @@ async function run(): Promise<void> {
        utils.logWarning(
            `Event Validation Error: The event type ${
                process.env[Events.Key]
            } is not supported. Only ${utils
                .getSupportedEvents()
                .join(", ")} events are supported at this time.`
            } is not supported because it's not tied to a branch or tag ref.`
        );
        return;
    }

@@ -22,7 +18,7 @@ async function run(): Promise<void> {
    const state = utils.getCacheState();

    // Inputs are re-evaluated before the post action, so we want the original key used for restore
    const primaryKey = core.getState(State.CacheKey);
    const primaryKey = core.getState(State.CachePrimaryKey);
    if (!primaryKey) {
        utils.logWarning(`Error retrieving key from state.`);
        return;

@@ -35,53 +31,21 @@ async function run(): Promise<void> {
        return;
    }

    const compressionMethod = await utils.getCompressionMethod();

    core.debug("Reserving Cache");
    const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
        compressionMethod: compressionMethod
    const cachePaths = utils.getInputAsArray(Inputs.Path, {
        required: true
    });
    if (cacheId == -1) {
        core.info(
            `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
        );
        return;

    try {
        await cache.saveCache(cachePaths, primaryKey);
    } catch (error) {
        if (error.name === cache.ValidationError.name) {
            throw error;
        } else if (error.name === cache.ReserveCacheError.name) {
            core.info(error.message);
        } else {
            utils.logWarning(error.message);
        }
    }
    core.debug(`Cache ID: ${cacheId}`);
    const cachePaths = await utils.resolvePaths(
        core
            .getInput(Inputs.Path, { required: true })
            .split("\n")
            .filter(x => x !== "")
    );

    core.debug("Cache Paths:");
    core.debug(`${JSON.stringify(cachePaths)}`);

    const archiveFolder = await utils.createTempDirectory();
    const archivePath = path.join(
        archiveFolder,
        utils.getCacheFileName(compressionMethod)
    );

    core.debug(`Archive Path: ${archivePath}`);

    await createTar(archiveFolder, cachePaths, compressionMethod);

    const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
    const archiveFileSize = utils.getArchiveFileSize(archivePath);
    core.debug(`File Size: ${archiveFileSize}`);
    if (archiveFileSize > fileSizeLimit) {
        utils.logWarning(
            `Cache size of ~${Math.round(
                archiveFileSize / (1024 * 1024)
            )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
        );
        return;
    }

    core.debug(`Saving Cache (ID: ${cacheId})`);
    await cacheHttpClient.saveCache(cacheId, archivePath);
} catch (error) {
    utils.logWarning(error.message);
}
87 src/tar.ts
@@ -1,87 +0,0 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync, writeFileSync } from "fs";
import * as path from "path";

import { CompressionMethod } from "./constants";
import * as utils from "./utils/actionUtils";

async function getTarPath(args: string[]): Promise<string> {
    // Explicitly use BSD Tar on Windows
    const IS_WINDOWS = process.platform === "win32";
    if (IS_WINDOWS) {
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (existsSync(systemTar)) {
            return systemTar;
        } else if (await utils.useGnuTar()) {
            args.push("--force-local");
        }
    }
    return await io.which("tar", true);
}

async function execTar(args: string[], cwd?: string): Promise<void> {
    try {
        await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
    } catch (error) {
        throw new Error(`Tar failed with error: ${error?.message}`);
    }
}

function getWorkingDirectory(): string {
    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}

export async function extractTar(
    archivePath: string,
    compressionMethod: CompressionMethod
): Promise<void> {
    // Create directory to extract tar into
    const workingDirectory = getWorkingDirectory();
    await io.mkdirP(workingDirectory);
    // --d: Decompress.
    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
    // Using 30 here because we also support 32-bit self-hosted runners.
    const args = [
        ...(compressionMethod == CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -d --long=30"]
            : ["-z"]),
        "-xf",
        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
    ];
    await execTar(args);
}

export async function createTar(
    archiveFolder: string,
    sourceDirectories: string[],
    compressionMethod: CompressionMethod
): Promise<void> {
    // Write source directories to manifest.txt to avoid command length limits
    const manifestFilename = "manifest.txt";
    const cacheFileName = utils.getCacheFileName(compressionMethod);
    writeFileSync(
        path.join(archiveFolder, manifestFilename),
        sourceDirectories.join("\n")
    );
    // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
    // Using 30 here because we also support 32-bit self-hosted runners.
    const workingDirectory = getWorkingDirectory();
    const args = [
        ...(compressionMethod == CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -T0 --long=30"]
            : ["-z"]),
        "-cf",
        cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "--files-from",
        manifestFilename
    ];
    await execTar(args, archiveFolder);
}
@@ -1,86 +1,35 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import * as fs from "fs";
import * as path from "path";
import * as util from "util";
import * as uuidV4 from "uuid/v4";

import {
    CacheFilename,
    CompressionMethod,
    Events,
    Outputs,
    State
} from "../constants";
import { ArtifactCacheEntry } from "../contracts";
import { Outputs, RefKey, State } from "../constants";

// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
export async function createTempDirectory(): Promise<string> {
    const IS_WINDOWS = process.platform === "win32";

    let tempDirectory: string = process.env["RUNNER_TEMP"] || "";

    if (!tempDirectory) {
        let baseLocation: string;
        if (IS_WINDOWS) {
            // On Windows use the USERPROFILE env variable
            baseLocation = process.env["USERPROFILE"] || "C:\\";
        } else {
            if (process.platform === "darwin") {
                baseLocation = "/Users";
            } else {
                baseLocation = "/home";
            }
        }
        tempDirectory = path.join(baseLocation, "actions", "temp");
    }

    const dest = path.join(tempDirectory, uuidV4.default());
    await io.mkdirP(dest);
    return dest;
}

export function getArchiveFileSize(path: string): number {
    return fs.statSync(path).size;
}

export function isExactKeyMatch(
    key: string,
    cacheResult?: ArtifactCacheEntry
): boolean {
export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
    return !!(
        cacheResult &&
        cacheResult.cacheKey &&
        cacheResult.cacheKey.localeCompare(key, undefined, {
        cacheKey &&
        cacheKey.localeCompare(key, undefined, {
            sensitivity: "accent"
        }) === 0
    );
}
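// Illustrative calls (hypothetical keys): "accent" sensitivity ignores case
// but not base letters or accents, so keys differing only in case still
// count as an exact match.
//   isExactKeyMatch("Linux-node-ABC", "linux-node-abc"); // => true
//   isExactKeyMatch("Linux-node-abc", "Linux-node-xyz"); // => false
//   isExactKeyMatch("Linux-node-abc", undefined);        // => false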
|

export function setCacheState(state: ArtifactCacheEntry): void {
    core.saveState(State.CacheResult, JSON.stringify(state));
export function setCacheState(state: string): void {
    core.saveState(State.CacheMatchedKey, state);
}

export function setCacheHitOutput(isCacheHit: boolean): void {
    core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}

export function setOutputAndState(
    key: string,
    cacheResult?: ArtifactCacheEntry
): void {
    setCacheHitOutput(isExactKeyMatch(key, cacheResult));
    // Store the cache result if it exists
    cacheResult && setCacheState(cacheResult);
export function setOutputAndState(key: string, cacheKey?: string): void {
    setCacheHitOutput(isExactKeyMatch(key, cacheKey));
    // Store the matched cache key if it exists
    cacheKey && setCacheState(cacheKey);
}
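For context on the refactor above: the matched key is now stored in the action's saved state as a plain string instead of a JSON-serialized ArtifactCacheEntry. Conceptually the flow looks like the hedged sketch below; in a real run the state travels between the main (restore) and post (save) steps via the runner, not within one process.

    // Main (restore) step: record the key that actually matched.
    setCacheState("npm-deps-abc123");   // core.saveState(State.CacheMatchedKey, ...)
    // Post (save) step, a separate process: read it back as a plain string.
    const matched = getCacheState();    // core.getState(State.CacheMatchedKey)
    // matched === "npm-deps-abc123", or undefined when nothing was restored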

export function getCacheState(): ArtifactCacheEntry | undefined {
    const stateData = core.getState(State.CacheResult);
    core.debug(`State: ${stateData}`);
    if (stateData) {
        return JSON.parse(stateData) as ArtifactCacheEntry;
export function getCacheState(): string | undefined {
    const cacheKey = core.getState(State.CacheMatchedKey);
    if (cacheKey) {
        core.debug(`Cache state/key: ${cacheKey}`);
        return cacheKey;
    }

    return undefined;
@ -91,76 +40,19 @@ export function logWarning(message: string): void {
    core.info(`${warningPrefix}${message}`);
}

export async function resolvePaths(patterns: string[]): Promise<string[]> {
    const paths: string[] = [];
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    const globber = await glob.create(patterns.join("\n"), {
        implicitDescendants: false
    });

    for await (const file of globber.globGenerator()) {
        const relativeFile = path.relative(workspace, file);
        core.debug(`Matched: ${relativeFile}`);
        // Paths are made relative so the tar entries are all relative to the root of the workspace.
        paths.push(`${relativeFile}`);
    }

    return paths;
}
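An aside on resolvePaths: because every match is rewritten relative to GITHUB_WORKSPACE, entries that live outside the workspace come back with leading ../ segments. A hedged sketch of that behavior (the paths are hypothetical):

    // With GITHUB_WORKSPACE=/home/runner/work/repo/repo:
    // /home/runner/work/repo/repo/node_modules  ->  "node_modules"
    // /home/runner/.npm                         ->  "../../../.npm"
    console.log(path.relative("/home/runner/work/repo/repo", "/home/runner/.npm")); // "../../../.npm"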

export function getSupportedEvents(): string[] {
    return [Events.Push, Events.PullRequest];
}

// Currently the cache token is only authorized for push and pull_request events
// All other events will fail when reading and saving the cache
// Cache token authorized for all events that are tied to a ref
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {
    const githubEvent = process.env[Events.Key] || "";
    return getSupportedEvents().includes(githubEvent);
    return RefKey in process.env && Boolean(process.env[RefKey]);
}
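To illustrate the new guard: RefKey presumably names the GITHUB_REF environment variable (its definition lives in constants.ts, outside this hunk), so the check now passes for any event that carries a ref rather than a hard-coded event list. A sketch under that assumption:

    // Assuming RefKey === "GITHUB_REF":
    process.env["GITHUB_REF"] = "refs/heads/main";
    console.log("GITHUB_REF" in process.env && Boolean(process.env["GITHUB_REF"])); // true
    delete process.env["GITHUB_REF"]; // e.g. some event payloads carry no ref
    console.log("GITHUB_REF" in process.env && Boolean(process.env["GITHUB_REF"])); // false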

export function unlinkFile(path: fs.PathLike): Promise<void> {
    return util.promisify(fs.unlink)(path);
}

async function getVersion(app: string): Promise<string> {
    core.debug(`Checking ${app} --version`);
    let versionOutput = "";
    try {
        await exec.exec(`${app} --version`, [], {
            ignoreReturnCode: true,
            silent: true,
            listeners: {
                stdout: (data: Buffer): string =>
                    (versionOutput += data.toString()),
                stderr: (data: Buffer): string =>
                    (versionOutput += data.toString())
            }
        });
    } catch (err) {
        core.debug(err.message);
    }

    versionOutput = versionOutput.trim();
    core.debug(versionOutput);
    return versionOutput;
}

export async function getCompressionMethod(): Promise<CompressionMethod> {
    const versionOutput = await getVersion("zstd");
    return versionOutput.toLowerCase().includes("zstd command line interface")
        ? CompressionMethod.Zstd
        : CompressionMethod.Gzip;
}
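For orientation: the detection above simply greps the banner that zstd --version prints. A minimal sketch, assuming a typical zstd 1.x banner (the exact wording and version vary by build):

    // Typical banner; version number and builder text are illustrative:
    const banner = "*** zstd command line interface 64-bits v1.4.4, by Yann Collet ***";
    const usesZstd = banner.toLowerCase().includes("zstd command line interface");
    console.log(usesZstd ? "cache.tzst via zstd" : "cache.tgz via gzip");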

export function getCacheFileName(compressionMethod: CompressionMethod): string {
    return compressionMethod == CompressionMethod.Zstd
        ? CacheFilename.Zstd
        : CacheFilename.Gzip;
}

export async function useGnuTar(): Promise<boolean> {
    const versionOutput = await getVersion("tar");
    return versionOutput.toLowerCase().includes("gnu tar");
export function getInputAsArray(
    name: string,
    options?: core.InputOptions
): string[] {
    return core
        .getInput(name, options)
        .split("\n")
        .map(s => s.trim())
        .filter(x => x !== "");
}
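The new getInputAsArray helper backs multi-line inputs such as path and key lists. A standalone sketch of what it produces (the raw input value is hypothetical):

    // Given a YAML input like:
    //   path: |
    //     node_modules
    //     ~/.npm
    const raw = "node_modules\n  ~/.npm\n\n";
    const parsed = raw.split("\n").map(s => s.trim()).filter(x => x !== "");
    console.log(parsed); // ["node_modules", "~/.npm"]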