Mirror of https://github.com/actions/cache.git, synced 2025-06-25 20:01:11 +02:00

Compare commits: revert-173 ... add-retrie

5 commits: c8d75a8073, a8b61326cf, 25b1a139de, 6efe05572d, aced43a650
.github/workflows/codeql.yml (vendored, 33 lines changed)
@@ -1,29 +1,23 @@
-name: "Code scanning - action"
+name: "Code Scanning - Action"
 
 on:
   push:
-  pull_request:
   schedule:
-    - cron: '0 19 * * 0'
+    - cron: '0 0 * * 0'
 
 jobs:
   CodeQL-Build:
 
-    # CodeQL runs on ubuntu-latest and windows-latest
+    strategy:
+      fail-fast: false
+
+    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
     runs-on: ubuntu-latest
 
     steps:
     - name: Checkout repository
       uses: actions/checkout@v2
-      with:
-        # We must fetch at least the immediate parents so that if this is
-        # a pull request then we can checkout the head.
-        fetch-depth: 2
-
-    # If this run was triggered by a pull request event, then checkout
-    # the head of the pull request instead of the merge commit.
-    - run: git checkout HEAD^2
-      if: ${{ github.event_name == 'pull_request' }}
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
@@ -33,20 +27,9 @@ jobs:
       # languages: go, javascript, csharp, python, cpp, java
 
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
-    # If this step fails, then you should remove it and run the build manually (see below)
+    # If this step fails, then you should remove it and run the build manually (see below).
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1
 
-    # ℹ️ Command-line programs to run using the OS shell.
-    # 📚 https://git.io/JvXDl
-
-    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
-    #    and modify them (or add more) to build your code if your project
-    #    uses a compiled language
-
-    #- run: |
-    #   make bootstrap
-    #   make release
-
     - name: Perform CodeQL Analysis
       uses: github/codeql-action/analyze@v1
.github/workflows/workflow.yml (vendored, 19 lines changed)
@@ -4,13 +4,11 @@ on:
   pull_request:
     branches:
       - master
-      - releases/**
     paths-ignore:
       - '**.md'
   push:
     branches:
       - master
-      - releases/**
     paths-ignore:
       - '**.md'
 
@@ -19,7 +17,7 @@ jobs:
   build:
     strategy:
       matrix:
-        os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
+        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
@@ -47,23 +45,12 @@ jobs:
       run: npm run lint
     - name: Build & Test
       run: npm run test
-    - name: Ensure dist/ folder is up-to-date
-      if: ${{ runner.os == 'Linux' }}
-      shell: bash
-      run: |
-        npm run build
-        if [ "$(git status --porcelain | wc -l)" -gt "0" ]; then
-          echo "Detected uncommitted changes after build. See status below:"
-          git diff
-          exit 1
-        fi
-
 
   # End to end save and restore
   test-save:
     strategy:
       matrix:
-        os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
+        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
@@ -86,7 +73,7 @@ jobs:
     needs: test-save
     strategy:
       matrix:
-        os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
+        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
README.md (69 lines changed)
@@ -8,28 +8,6 @@ This action allows caching dependencies and build outputs to improve workflow execution time.
 
 See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
 
-## What's New
-
-* Added support for multiple paths, [glob patterns](https://github.com/actions/toolkit/tree/master/packages/glob), and single file caches.
-
-```yaml
-- name: Cache multiple paths
-  uses: actions/cache@v2
-  with:
-    path: |
-      ~/cache
-      !~/cache/exclude
-      **/node_modules
-    key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
-```
-
-* Increased performance and improved cache sizes using `zstd` compression for Linux and macOS runners
-* Allowed caching for all events with a ref. See [events that trigger workflow](https://help.github.com/en/actions/reference/events-that-trigger-workflows) for info on which events do not have a `GITHUB_REF`
-* Released the [`@actions/cache`](https://github.com/actions/toolkit/tree/master/packages/cache) npm package to allow other actions to utilize caching
-* Added a best-effort cleanup step to delete the archive after extraction to reduce storage space
-
-Refer [here](https://github.com/actions/cache/blob/v1/README.md) for previous versions
-
 ## Usage
 
 ### Pre-requisites
@@ -37,7 +15,7 @@ Create a workflow `.yml` file in your repositories `.github/workflows` directory
 
 ### Inputs
 
-* `path` - A list of files, directories, and wildcard patterns to cache and restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/master/packages/glob) for supported patterns.
+* `path` - A directory to store and save the cache
 * `key` - An explicit key for restoring and saving the cache
 * `restore-keys` - An ordered list of keys to use for restoring the cache if no cache hit occurred for key
 
@@ -47,11 +25,6 @@ Create a workflow `.yml` file in your repositories `.github/workflows` directory
 
 > See [Skipping steps based on cache-hit](#Skipping-steps-based-on-cache-hit) for info on using this output
 
-### Cache scopes
-The cache is scoped to the key and branch. The default branch cache is available to other branches.
-
-See [Matching a cache key](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key) for more info.
-
 ### Example workflow
 
 ```yaml
@@ -68,7 +41,7 @@ jobs:
 
     - name: Cache Primes
       id: cache-primes
-      uses: actions/cache@v2
+      uses: actions/cache@v1
       with:
         path: prime-numbers
         key: ${{ runner.os }}-primes
@@ -88,7 +61,6 @@ Every programming language and framework has its own way of caching.
 See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
 
 - [C# - Nuget](./examples.md#c---nuget)
-- [D - DUB](./examples.md#d---dub)
 - [Elixir - Mix](./examples.md#elixir---mix)
 - [Go - Modules](./examples.md#go---modules)
 - [Haskell - Cabal](./examples.md#haskell---cabal)
@@ -108,39 +80,6 @@ See [Examples](examples.md) for a list of `actions/cache` implementations
 - [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
 - [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)
 
-## Creating a cache key
-
-A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions.
-
-For example, using the [`hashFiles`](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles) function allows you to create a new cache when dependencies change.
-
-```yaml
-  - uses: actions/cache@v2
-    with:
-      path: |
-        path/to/dependencies
-        some/other/dependencies
-      key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
-```
-
-Additionally, you can use arbitrary command output in a cache key, such as a date or software version:
-
-```yaml
-  # http://man7.org/linux/man-pages/man1/date.1.html
-  - name: Get Date
-    id: get-date
-    run: |
-      echo "::set-output name=date::$(/bin/date -u "+%Y%m%d")"
-    shell: bash
-
-  - uses: actions/cache@v2
-    with:
-      path: path/to/dependencies
-      key: ${{ runner.os }}-${{ steps.get-date.outputs.date }}-${{ hashFiles('**/lockfiles') }}
-```
-
-See [Using contexts to create cache keys](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#using-contexts-to-create-cache-keys)
-
 ## Cache Limits
 
 A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
@@ -154,7 +93,7 @@ Example:
 steps:
   - uses: actions/checkout@v2
 
-  - uses: actions/cache@v2
+  - uses: actions/cache@v1
     id: cache
     with:
       path: path/to/dependencies
@@ -168,7 +107,7 @@ steps:
 > Note: The `id` defined in `actions/cache` must match the `id` in the `if` statement (i.e. `steps.[ID].outputs.cache-hit`)
 
 ## Contributing
-We would love for you to contribute to `actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.
+We would love for you to contribute to `@actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.
 
 ## License
 The scripts and documentation in this project are released under the [MIT License](LICENSE)
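The `restore-keys` input retained on both sides of this README diff is an ordered fallback: when nothing matches `key` exactly, each restore key is tried in turn as a prefix match. A minimal TypeScript sketch of that lookup, with hypothetical names (`findCacheKey`, `existingKeys`) that are illustrative rather than the action's actual API:

```ts
// Illustrative sketch of restore-keys fallback matching; the helper name
// and the newest-first ordering of existingKeys are assumptions.
function findCacheKey(
    key: string,
    restoreKeys: string[],
    existingKeys: string[] // keys known to the cache service, assumed newest-first
): string | undefined {
    // An exact match on the primary key wins and counts as a cache hit.
    if (existingKeys.includes(key)) {
        return key;
    }
    // Otherwise try the ordered restore keys as prefixes.
    for (const restoreKey of restoreKeys) {
        const match = existingKeys.find(k => k.startsWith(restoreKey));
        if (match) {
            return match;
        }
    }
    return undefined; // full cache miss
}
```

A partial match restores the cache but leaves the `cache-hit` output `false`, which is why the note above stresses that the `id` must line up with `steps.[ID].outputs.cache-hit` when skipping steps.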
__tests__/actionUtils.test.ts

@@ -1,72 +1,97 @@
 import * as core from "@actions/core";
+import * as io from "@actions/io";
+import { promises as fs } from "fs";
+import * as os from "os";
+import * as path from "path";
 
-import { Events, Outputs, RefKey, State } from "../src/constants";
+import { Events, Outputs, State } from "../src/constants";
+import { ArtifactCacheEntry } from "../src/contracts";
 import * as actionUtils from "../src/utils/actionUtils";
-import * as testUtils from "../src/utils/testUtils";
+
+import uuid = require("uuid");
 
 jest.mock("@actions/core");
+jest.mock("os");
 
-beforeAll(() => {
-    jest.spyOn(core, "getInput").mockImplementation((name, options) => {
-        return jest.requireActual("@actions/core").getInput(name, options);
-    });
-});
+function getTempDir(): string {
+    return path.join(__dirname, "_temp", "actionUtils");
+}
 
 afterEach(() => {
     delete process.env[Events.Key];
-    delete process.env[RefKey];
 });
 
-test("isExactKeyMatch with undefined cache key returns false", () => {
-    const key = "linux-rust";
-    const cacheKey = undefined;
+afterAll(async () => {
+    delete process.env["GITHUB_WORKSPACE"];
+    await io.rmRF(getTempDir());
+});
 
-    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
-});
-
-test("isExactKeyMatch with empty cache key returns false", () => {
-    const key = "linux-rust";
-    const cacheKey = "";
+test("getArchiveFileSize returns file size", () => {
+    const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
 
-    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
+    const size = actionUtils.getArchiveFileSize(filePath);
+
+    expect(size).toBe(11);
+});
+
+test("isExactKeyMatch with undefined cache entry returns false", () => {
+    const key = "linux-rust";
+    const cacheEntry = undefined;
+
+    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
+});
+
+test("isExactKeyMatch with empty cache entry returns false", () => {
+    const key = "linux-rust";
+    const cacheEntry: ArtifactCacheEntry = {};
+
+    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
 });
 
 test("isExactKeyMatch with different keys returns false", () => {
     const key = "linux-rust";
-    const cacheKey = "linux-";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "linux-"
+    };
 
-    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
+    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
 });
 
 test("isExactKeyMatch with different key accents returns false", () => {
     const key = "linux-áccent";
-    const cacheKey = "linux-accent";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "linux-accent"
+    };
 
-    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
+    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
 });
 
 test("isExactKeyMatch with same key returns true", () => {
     const key = "linux-rust";
-    const cacheKey = "linux-rust";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "linux-rust"
+    };
 
-    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true);
+    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
 });
 
 test("isExactKeyMatch with same key and different casing returns true", () => {
     const key = "linux-rust";
-    const cacheKey = "LINUX-RUST";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "LINUX-RUST"
+    };
 
-    expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true);
+    expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
 });
 
 test("setOutputAndState with undefined entry to set cache-hit output", () => {
     const key = "linux-rust";
-    const cacheKey = undefined;
+    const cacheEntry = undefined;
 
     const setOutputMock = jest.spyOn(core, "setOutput");
     const saveStateMock = jest.spyOn(core, "saveState");
 
-    actionUtils.setOutputAndState(key, cacheKey);
+    actionUtils.setOutputAndState(key, cacheEntry);
 
     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
     expect(setOutputMock).toHaveBeenCalledTimes(1);
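The isExactKeyMatch expectations above (same key true, different casing true, different accents false, undefined or empty entry false) are consistent with a locale-aware string comparison. A sketch that satisfies exactly that matrix; it is inferred from the tests, not quoted from the project's source:

```ts
interface ArtifactCacheEntry {
    cacheKey?: string;
}

// Case differences are ignored but accent differences are not;
// localeCompare with sensitivity "accent" gives exactly that behavior.
function isExactKeyMatch(key: string, cacheEntry?: ArtifactCacheEntry): boolean {
    return !!(
        cacheEntry?.cacheKey &&
        cacheEntry.cacheKey.localeCompare(key, undefined, {
            sensitivity: "accent"
        }) === 0
    );
}
```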
@@ -76,33 +101,43 @@ test("setOutputAndState with undefined entry to set cache-hit output", () => {
 
 test("setOutputAndState with exact match to set cache-hit output and state", () => {
     const key = "linux-rust";
-    const cacheKey = "linux-rust";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "linux-rust"
+    };
 
     const setOutputMock = jest.spyOn(core, "setOutput");
     const saveStateMock = jest.spyOn(core, "saveState");
 
-    actionUtils.setOutputAndState(key, cacheKey);
+    actionUtils.setOutputAndState(key, cacheEntry);
 
     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true");
     expect(setOutputMock).toHaveBeenCalledTimes(1);
 
-    expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
+    expect(saveStateMock).toHaveBeenCalledWith(
+        State.CacheResult,
+        JSON.stringify(cacheEntry)
+    );
     expect(saveStateMock).toHaveBeenCalledTimes(1);
 });
 
 test("setOutputAndState with no exact match to set cache-hit output and state", () => {
     const key = "linux-rust";
-    const cacheKey = "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43"
+    };
 
     const setOutputMock = jest.spyOn(core, "setOutput");
     const saveStateMock = jest.spyOn(core, "saveState");
 
-    actionUtils.setOutputAndState(key, cacheKey);
+    actionUtils.setOutputAndState(key, cacheEntry);
 
     expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
     expect(setOutputMock).toHaveBeenCalledTimes(1);
 
-    expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
+    expect(saveStateMock).toHaveBeenCalledWith(
+        State.CacheResult,
+        JSON.stringify(cacheEntry)
+    );
     expect(saveStateMock).toHaveBeenCalledTimes(1);
 });
 
@@ -116,23 +151,27 @@ test("getCacheState with no state returns undefined", () => {
 
     expect(state).toBe(undefined);
 
-    expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
+    expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
     expect(getStateMock).toHaveBeenCalledTimes(1);
 });
 
 test("getCacheState with valid state", () => {
-    const cacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
+    const cacheEntry: ArtifactCacheEntry = {
+        cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
+        scope: "refs/heads/master",
+        creationTime: "2019-11-13T19:18:02+00:00",
+        archiveLocation: "www.actionscache.test/download"
+    };
     const getStateMock = jest.spyOn(core, "getState");
     getStateMock.mockImplementation(() => {
-        return cacheKey;
+        return JSON.stringify(cacheEntry);
     });
 
     const state = actionUtils.getCacheState();
 
-    expect(state).toEqual(cacheKey);
+    expect(state).toEqual(cacheEntry);
 
-    expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
+    expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
     expect(getStateMock).toHaveBeenCalledTimes(1);
 });
 
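Both sides of these hunks keep save and read symmetric: whatever setOutputAndState stores via core.saveState is what getCacheState later reads back. A sketch of the left-hand (plain string key) variants, where the literal state and output names are assumed stand-ins for the real values in src/constants:

```ts
import * as core from "@actions/core";

const CacheMatchedKey = "CACHE_RESULT"; // assumed value of State.CacheMatchedKey
const CacheHit = "cache-hit"; // assumed value of Outputs.CacheHit

export function setOutputAndState(key: string, cacheKey?: string): void {
    // "true" only on an exact (case-insensitive) match of the primary key.
    const exact =
        !!cacheKey &&
        cacheKey.localeCompare(key, undefined, { sensitivity: "accent" }) === 0;
    core.setOutput(CacheHit, exact.toString());

    // Persist the matched key so the post-job save step can skip
    // re-uploading an archive that already exists.
    if (cacheKey) {
        core.saveState(CacheMatchedKey, cacheKey);
    }
}

export function getCacheState(): string | undefined {
    // core.getState returns "" when nothing was saved, hence the mapping to undefined.
    const state = core.getState(CacheMatchedKey);
    return state !== "" ? state : undefined;
}
```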
@@ -146,7 +185,7 @@ test("logWarning logs a message with a warning prefix", () => {
     expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`);
 });
 
-test("isValidEvent returns false for event that does not have a branch or tag", () => {
+test("isValidEvent returns false for unknown event", () => {
     const event = "foo";
     process.env[Events.Key] = event;
 
@@ -155,42 +194,164 @@ test("isValidEvent returns false for event that does not have a branch or tag", () => {
     expect(isValidEvent).toBe(false);
 });
 
-test("isValidEvent returns true for event that has a ref", () => {
+test("resolvePaths with no ~ in path", async () => {
+    const filePath = ".cache";
+
+    // Create the following layout:
+    //   cwd
+    //   cwd/.cache
+    //   cwd/.cache/file.txt
+
+    const root = path.join(getTempDir(), "no-tilde");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    const cache = path.join(root, ".cache");
+    await fs.mkdir(cache, { recursive: true });
+    await fs.writeFile(path.join(cache, "file.txt"), "cached");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [filePath];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
+test("resolvePaths with ~ in path", async () => {
+    const cacheDir = uuid();
+    const filePath = `~/${cacheDir}`;
+    // Create the following layout:
+    //   ~/uuid
+    //   ~/uuid/file.txt
+
+    const homedir = jest.requireActual("os").homedir();
+    const homedirMock = jest.spyOn(os, "homedir");
+    homedirMock.mockImplementation(() => {
+        return homedir;
+    });
+
+    const target = path.join(homedir, cacheDir);
+    await fs.mkdir(target, { recursive: true });
+    await fs.writeFile(path.join(target, "file.txt"), "cached");
+
+    const root = getTempDir();
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    try {
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [path.relative(root, target)];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        await io.rmRF(target);
+    }
+});
+
+test("resolvePaths with home not found", async () => {
+    const filePath = "~/.cache/yarn";
+    const homedirMock = jest.spyOn(os, "homedir");
+    homedirMock.mockImplementation(() => {
+        return "";
+    });
+
+    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
+        "Unable to determine HOME directory"
+    );
+});
+
+test("resolvePaths inclusion pattern returns found", async () => {
+    const pattern = "*.ts";
+    // Create the following layout:
+    //   inclusion-patterns
+    //   inclusion-patterns/miss.txt
+    //   inclusion-patterns/test.ts
+
+    const root = path.join(getTempDir(), "inclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([pattern]);
+
+        const expectedPath = ["test.ts"];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
+test("resolvePaths exclusion pattern returns not found", async () => {
+    const patterns = ["*.ts", "!test.ts"];
+    // Create the following layout:
+    //   exclusion-patterns
+    //   exclusion-patterns/miss.txt
+    //   exclusion-patterns/test.ts
+
+    const root = path.join(getTempDir(), "exclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "no match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths(patterns);
+
+        const expectedPath = [];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
+test("isValidEvent returns true for push event", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
-    process.env[RefKey] = "ref/heads/feature";
 
     const isValidEvent = actionUtils.isValidEvent();
 
     expect(isValidEvent).toBe(true);
 });
 
-test("getInputAsArray returns empty array if not required and missing", () => {
-    expect(actionUtils.getInputAsArray("foo")).toEqual([]);
+test("isValidEvent returns true for pull request event", () => {
+    const event = Events.PullRequest;
+    process.env[Events.Key] = event;
+
+    const isValidEvent = actionUtils.isValidEvent();
+
+    expect(isValidEvent).toBe(true);
 });
 
-test("getInputAsArray throws error if required and missing", () => {
-    expect(() =>
-        actionUtils.getInputAsArray("foo", { required: true })
-    ).toThrowError();
-});
-
-test("getInputAsArray handles single line correctly", () => {
-    testUtils.setInput("foo", "bar");
-    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar"]);
-});
-
-test("getInputAsArray handles multiple lines correctly", () => {
-    testUtils.setInput("foo", "bar\nbaz");
-    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
-});
-
-test("getInputAsArray handles different new lines correctly", () => {
-    testUtils.setInput("foo", "bar\r\nbaz");
-    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
-});
-
-test("getInputAsArray handles empty lines correctly", () => {
-    testUtils.setInput("foo", "\n\nbar\n\nbaz\n\n");
-    expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
+test("unlinkFile unlinks file", async () => {
+    const testDirectory = await fs.mkdtemp("unlinkFileTest");
+    const testFile = path.join(testDirectory, "test.txt");
+    await fs.writeFile(testFile, "hello world");
+
+    await actionUtils.unlinkFile(testFile);
+
+    // This should throw as testFile should not exist
+    await expect(fs.stat(testFile)).rejects.toThrow();
+
+    await fs.rmdir(testDirectory);
 });
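The resolvePaths tests above pin down its contract: patterns are globbed, `~` expands through os.homedir() (failing with "Unable to determine HOME directory" when that is empty), and every match comes back relative to GITHUB_WORKSPACE, because tarball entries are stored workspace-relative. A sketch consistent with that contract, assuming @actions/glob performs the tilde expansion:

```ts
import * as glob from "@actions/glob";
import * as path from "path";

// Sketch: expand the patterns, then rebase each match onto the workspace
// so the resulting paths line up with the tarball's relative entries.
export async function resolvePaths(patterns: string[]): Promise<string[]> {
    const paths: string[] = [];
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    const globber = await glob.create(patterns.join("\n"));

    for await (const file of globber.globGenerator()) {
        paths.push(path.relative(workspace, file));
    }

    return paths;
}
```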
__tests__/cacheHttpsClient.test.ts (new file, 177 lines)
@@ -0,0 +1,177 @@
+import { getCacheVersion, retry } from "../src/cacheHttpClient";
+import { CompressionMethod, Inputs } from "../src/constants";
+import * as testUtils from "../src/utils/testUtils";
+
+afterEach(() => {
+    testUtils.clearInputs();
+});
+
+test("getCacheVersion with path input and compression method undefined returns version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+
+    const result = getCacheVersion();
+
+    expect(result).toEqual(
+        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
+    );
+});
+
+test("getCacheVersion with zstd compression returns version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+    const result = getCacheVersion(CompressionMethod.Zstd);
+
+    expect(result).toEqual(
+        "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
+    );
+});
+
+test("getCacheVersion with gzip compression does not change version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+    const result = getCacheVersion(CompressionMethod.Gzip);
+
+    expect(result).toEqual(
+        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
+    );
+});
+
+test("getCacheVersion with no input throws", async () => {
+    expect(() => getCacheVersion()).toThrow();
+});
+
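The fixed digests asserted above suggest the version is a SHA-256 over the path input plus an optional compression component, with gzip treated as the default so it leaves the version unchanged. A sketch under that assumption (the "|" separator is a guess that merely needs to be stable):

```ts
import * as core from "@actions/core";
import * as crypto from "crypto";

// Sketch: salt the version with the compression method only when it is
// zstd, so gzip (the default) hashes identically to "no method given".
export function getCacheVersion(compressionMethod?: string): string {
    const components = [core.getInput("path", { required: true })].concat(
        compressionMethod === "zstd" ? [compressionMethod] : []
    );

    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}
```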
+interface TestResponse {
+    statusCode: number;
+    result: string | null;
+}
+
+function handleResponse(
+    response: TestResponse | undefined
+): Promise<TestResponse> {
+    if (!response) {
+        fail("Retry method called too many times");
+    }
+
+    if (response.statusCode === 999) {
+        throw Error("Test Error");
+    } else {
+        return Promise.resolve(response);
+    }
+}
+
+async function testRetryExpectingResult(
+    responses: Array<TestResponse>,
+    expectedResult: string | null
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    const actualResult = await retry(
+        "test",
+        () => handleResponse(responses.pop()),
+        (response: TestResponse) => response.statusCode
+    );
+
+    expect(actualResult.result).toEqual(expectedResult);
+}
+
+async function testRetryExpectingError(
+    responses: Array<TestResponse>
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    expect(
+        retry(
+            "test",
+            () => handleResponse(responses.pop()),
+            (response: TestResponse) => response.statusCode
+        )
+    ).rejects.toBeInstanceOf(Error);
+}
+
+test("retry works on successful response", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry works after retryable status code", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 503,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry fails after exhausting retries", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry fails after non-retryable status code", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 500,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry works after error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 999,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry returns after client error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 400,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        null
+    );
+});
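Taken together, these cases fix the retry policy: any status below 500 is returned to the caller (so a 400 surfaces as a result, not an error), 503 retries, other 5xx codes abort, a thrown error counts as retryable, and only two attempts are made. A sketch that satisfies all six tests; the structure is inferred from them, not quoted from src/cacheHttpClient:

```ts
// Inferred from the tests: two attempts total, 503 and thrown errors
// retry, other 5xx abort, and sub-500 responses are returned as-is.
export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number,
    maxAttempts = 2
): Promise<T> {
    let errorMessage = "";

    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        let statusCode: number | undefined;
        try {
            const response = await method();
            statusCode = getStatusCode(response);
            if (statusCode < 500) {
                return response; // success, or a client error the caller handles
            }
            errorMessage = `${name} failed with status code ${statusCode}`;
        } catch (error) {
            // A thrown error (the tests' statusCode 999) is retryable.
            errorMessage = (error as Error).message;
        }

        if (statusCode !== undefined && statusCode !== 503) {
            break; // e.g. 500: a server error treated as non-retryable
        }
    }

    throw new Error(errorMessage);
}
```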
__tests__/restore.test.ts

@@ -1,11 +1,21 @@
-import * as cache from "@actions/cache";
 import * as core from "@actions/core";
+import * as path from "path";
 
-import { Events, Inputs, RefKey } from "../src/constants";
+import * as cacheHttpClient from "../src/cacheHttpClient";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Inputs
+} from "../src/constants";
+import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";
+import * as tar from "../src/tar";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
 
+jest.mock("../src/cacheHttpClient");
+jest.mock("../src/tar");
 jest.mock("../src/utils/actionUtils");
 
 beforeAll(() => {
@@ -21,23 +31,24 @@ beforeAll(() => {
         return actualUtils.isValidEvent();
     });
 
-    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
-        (name, options) => {
-            const actualUtils = jest.requireActual("../src/utils/actionUtils");
-            return actualUtils.getInputAsArray(name, options);
-        }
-    );
+    jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getSupportedEvents();
+    });
+
+    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getCacheFileName(cm);
+    });
 });
 
 beforeEach(() => {
     process.env[Events.Key] = Events.Push;
-    process.env[RefKey] = "refs/heads/feature-branch";
 });
 
 afterEach(() => {
     testUtils.clearInputs();
     delete process.env[Events.Key];
-    delete process.env[RefKey];
 });
 
 test("restore with invalid event outputs warning", async () => {
@@ -45,19 +56,16 @@ test("restore with invalid event outputs warning", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     const invalidEvent = "commit_comment";
     process.env[Events.Key] = invalidEvent;
-    delete process.env[RefKey];
     await run();
     expect(logWarningMock).toHaveBeenCalledWith(
-        `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
+        `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
 
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
-    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
-    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     // this input isn't necessary for restore b/c tarball contains entries relative to workspace
     expect(failedMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
@@ -67,89 +75,71 @@ test("restore with no path should fail", async () => {
 test("restore with no key", async () => {
     testUtils.setInput(Inputs.Path, "node_modules");
     const failedMock = jest.spyOn(core, "setFailed");
-    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
-    expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledWith(
         "Input required and not supplied: key"
     );
 });
 
 test("restore with too many keys should fail", async () => {
-    const path = "node_modules";
     const key = "node-test";
     const restoreKeys = [...Array(20).keys()].map(x => x.toString());
     testUtils.setInputs({
-        path: path,
+        path: "node_modules",
         key,
         restoreKeys
     });
     const failedMock = jest.spyOn(core, "setFailed");
-    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: Keys are limited to a maximum of 10.`
     );
 });
 
 test("restore with large key should fail", async () => {
-    const path = "node_modules";
     const key = "foo".repeat(512); // Over the 512 character limit
     testUtils.setInputs({
-        path: path,
+        path: "node_modules",
         key
     });
     const failedMock = jest.spyOn(core, "setFailed");
-    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: ${key} cannot be larger than 512 characters.`
     );
 });
 
 test("restore with invalid key should fail", async () => {
-    const path = "node_modules";
     const key = "comma,comma";
     testUtils.setInputs({
-        path: path,
+        path: "node_modules",
         key
     });
     const failedMock = jest.spyOn(core, "setFailed");
-    const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     await run();
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
     expect(failedMock).toHaveBeenCalledWith(
         `Key Validation Error: ${key} cannot contain commas.`
     );
 });
 
 test("restore with no cache found", async () => {
-    const path = "node_modules";
     const key = "node-test";
     testUtils.setInputs({
-        path: path,
+        path: "node_modules",
         key
     });
 
     const infoMock = jest.spyOn(core, "info");
     const failedMock = jest.spyOn(core, "setFailed");
     const stateMock = jest.spyOn(core, "saveState");
-    const restoreCacheMock = jest
-        .spyOn(cache, "restoreCache")
-        .mockImplementationOnce(() => {
-            return Promise.resolve(undefined);
-        });
+    const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
+    clientMock.mockImplementation(() => {
+        return Promise.resolve(null);
+    });
 
     await run();
 
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
-
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
     expect(failedMock).toHaveBeenCalledTimes(0);
 
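The three failure messages asserted above fully determine the key validation the restore step runs before calling the cache service: at most 10 keys in total, each under 512 characters and free of commas. A sketch of that check; the function name and early-return shape are illustrative:

```ts
import * as core from "@actions/core";

// Illustrative: the validation implied by the assertions above.
function validateKeys(primaryKey: string, restoreKeys: string[]): boolean {
    const keys = [primaryKey, ...restoreKeys];

    if (keys.length > 10) {
        core.setFailed(
            "Key Validation Error: Keys are limited to a maximum of 10."
        );
        return false;
    }
    for (const key of keys) {
        if (key.length > 512) {
            core.setFailed(
                `Key Validation Error: ${key} cannot be larger than 512 characters.`
            );
            return false;
        }
        if (key.includes(",")) {
            core.setFailed(
                `Key Validation Error: ${key} cannot contain commas.`
            );
            return false;
        }
    }
    return true;
}
```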
@ -159,28 +149,25 @@ test("restore with no cache found", async () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
test("restore with server error should fail", async () => {
|
test("restore with server error should fail", async () => {
|
||||||
const path = "node_modules";
|
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: "node_modules",
|
||||||
key
|
key
|
||||||
});
|
});
|
||||||
|
|
||||||
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
|
||||||
const failedMock = jest.spyOn(core, "setFailed");
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
const stateMock = jest.spyOn(core, "saveState");
|
const stateMock = jest.spyOn(core, "saveState");
|
||||||
const restoreCacheMock = jest
|
|
||||||
.spyOn(cache, "restoreCache")
|
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||||
.mockImplementationOnce(() => {
|
clientMock.mockImplementation(() => {
|
||||||
throw new Error("HTTP Error Occurred");
|
throw new Error("HTTP Error Occurred");
|
||||||
});
|
});
|
||||||
|
|
||||||
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||||
|
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
|
|
||||||
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
expect(logWarningMock).toHaveBeenCalledTimes(1);
|
||||||
@ -193,11 +180,10 @@ test("restore with server error should fail", async () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
test("restore with restore keys and no cache found", async () => {
|
test("restore with restore keys and no cache found", async () => {
|
||||||
const path = "node_modules";
|
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
const restoreKey = "node-";
|
const restoreKey = "node-";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: "node_modules",
|
||||||
key,
|
key,
|
||||||
restoreKeys: [restoreKey]
|
restoreKeys: [restoreKey]
|
||||||
});
|
});
|
||||||
@ -205,17 +191,14 @@ test("restore with restore keys and no cache found", async () => {
|
|||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
const failedMock = jest.spyOn(core, "setFailed");
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
const stateMock = jest.spyOn(core, "saveState");
|
const stateMock = jest.spyOn(core, "saveState");
|
||||||
const restoreCacheMock = jest
|
|
||||||
.spyOn(cache, "restoreCache")
|
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||||
.mockImplementationOnce(() => {
|
clientMock.mockImplementation(() => {
|
||||||
return Promise.resolve(undefined);
|
return Promise.resolve(null);
|
||||||
});
|
});
|
||||||
|
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
|
|
||||||
@ -224,43 +207,161 @@ test("restore with restore keys and no cache found", async () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("restore with cache found for key", async () => {
|
test("restore with gzip compressed cache found", async () => {
|
||||||
const path = "node_modules";
|
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: "node_modules",
|
||||||
key
|
key
|
||||||
});
|
});
|
||||||
|
|
||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
const failedMock = jest.spyOn(core, "setFailed");
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
const stateMock = jest.spyOn(core, "saveState");
|
const stateMock = jest.spyOn(core, "saveState");
|
||||||
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
|
||||||
const restoreCacheMock = jest
|
const cacheEntry: ArtifactCacheEntry = {
|
||||||
.spyOn(cache, "restoreCache")
|
cacheKey: key,
|
||||||
.mockImplementationOnce(() => {
|
scope: "refs/heads/master",
|
||||||
return Promise.resolve(key);
|
archiveLocation: "www.actionscache.test/download"
|
||||||
|
};
|
||||||
|
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||||
|
getCacheMock.mockImplementation(() => {
|
||||||
|
return Promise.resolve(cacheEntry);
|
||||||
});
|
});
|
||||||
|
const tempPath = "/foo/bar";
|
||||||
|
|
||||||
|
const createTempDirectoryMock = jest.spyOn(
|
||||||
|
actionUtils,
|
||||||
|
"createTempDirectory"
|
||||||
|
);
|
||||||
|
createTempDirectoryMock.mockImplementation(() => {
|
||||||
|
return Promise.resolve(tempPath);
|
||||||
|
});
|
||||||
|
|
||||||
|
const archivePath = path.join(tempPath, CacheFilename.Gzip);
|
||||||
|
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||||
|
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||||
|
|
||||||
|
const fileSize = 142;
|
||||||
|
const getArchiveFileSizeMock = jest
|
||||||
|
.spyOn(actionUtils, "getArchiveFileSize")
|
||||||
|
.mockReturnValue(fileSize);
|
||||||
|
|
||||||
|
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||||
|
const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
|
||||||
|
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||||
|
|
||||||
|
const compression = CompressionMethod.Gzip;
|
||||||
|
const getCompressionMock = jest
|
||||||
|
.spyOn(actionUtils, "getCompressionMethod")
|
||||||
|
.mockReturnValue(Promise.resolve(compression));
|
||||||
|
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
|
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
|
expect(getCacheMock).toHaveBeenCalledWith([key], {
|
||||||
|
compressionMethod: compression
|
||||||
|
});
|
||||||
|
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||||
|
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||||
|
cacheEntry.archiveLocation,
|
||||||
|
archivePath
|
||||||
|
);
|
||||||
|
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||||
|
|
||||||
|
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||||
|
|
||||||
|
expect(unlinkFileMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
|
||||||
|
|
||||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
|
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
|
||||||
|
|
||||||
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
|
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
|
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||||
|
});
|
||||||
|
|
||||||
test("restore with a pull request event and zstd compressed cache found", async () => {
    const key = "node-test";
    testUtils.setInputs({
        path: "node_modules",
        key
    });

    process.env[Events.Key] = Events.PullRequest;

    const infoMock = jest.spyOn(core, "info");
    const failedMock = jest.spyOn(core, "setFailed");
    const stateMock = jest.spyOn(core, "saveState");

    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: key,
        scope: "refs/heads/master",
        archiveLocation: "www.actionscache.test/download"
    };
    const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
    getCacheMock.mockImplementation(() => {
        return Promise.resolve(cacheEntry);
    });
    const tempPath = "/foo/bar";

    const createTempDirectoryMock = jest.spyOn(
        actionUtils,
        "createTempDirectory"
    );
    createTempDirectoryMock.mockImplementation(() => {
        return Promise.resolve(tempPath);
    });

    const archivePath = path.join(tempPath, CacheFilename.Zstd);
    const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
    const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");

    const fileSize = 62915000;
    const getArchiveFileSizeMock = jest
        .spyOn(actionUtils, "getArchiveFileSize")
        .mockReturnValue(fileSize);

    const extractTarMock = jest.spyOn(tar, "extractTar");
    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
    const compression = CompressionMethod.Zstd;
    const getCompressionMock = jest
        .spyOn(actionUtils, "getCompressionMethod")
        .mockReturnValue(Promise.resolve(compression));

    await run();

    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
    expect(getCacheMock).toHaveBeenCalledWith([key], {
        compressionMethod: compression
    });
    expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
    expect(downloadCacheMock).toHaveBeenCalledWith(
        cacheEntry.archiveLocation,
        archivePath
    );
    expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
    expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);

    expect(extractTarMock).toHaveBeenCalledTimes(1);
    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);

    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
    expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);

    expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
    expect(failedMock).toHaveBeenCalledTimes(0);
    expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
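These restore tests all drive the same pipeline: query the cache service for an entry, download the archive into a scratch directory, extract it, and publish the cache-hit output. As a compact sketch of that flow (hypothetical, condensed from the mocked calls above rather than copied from src/restore.ts):

```typescript
// Hypothetical outline of the restore flow the mocks exercise. The client
// interface below is assumed from the calls mocked in these tests.
import * as path from "path";

interface CacheEntry {
    cacheKey?: string;
    archiveLocation?: string;
}

interface CacheClient {
    getCacheEntry(
        keys: string[],
        options: { compressionMethod: string }
    ): Promise<CacheEntry | null>;
    downloadCache(archiveLocation: string, archivePath: string): Promise<void>;
}

async function restoreFlow(
    client: CacheClient,
    keys: string[],
    compressionMethod: string,
    createTempDirectory: () => Promise<string>,
    extractTar: (archivePath: string, method: string) => Promise<void>,
    cacheFilename: string
): Promise<boolean> {
    // 1. Ask the cache service for the best match across the key list.
    const entry = await client.getCacheEntry(keys, { compressionMethod });
    if (!entry || !entry.archiveLocation) {
        return false; // cache miss
    }
    // 2. Download the archive into a temp directory.
    const archivePath = path.join(await createTempDirectory(), cacheFilename);
    await client.downloadCache(entry.archiveLocation, archivePath);
    // 3. Extract into the workspace; an exact primary-key match means cache-hit.
    await extractTar(archivePath, compressionMethod);
    return entry.cacheKey === keys[0];
}
```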
test("restore with cache found for restore key", async () => {
|
test("restore with cache found for restore key", async () => {
|
||||||
const path = "node_modules";
|
|
||||||
const key = "node-test";
|
const key = "node-test";
|
||||||
const restoreKey = "node-";
|
const restoreKey = "node-";
|
||||||
testUtils.setInputs({
|
testUtils.setInputs({
|
||||||
path: path,
|
path: "node_modules",
|
||||||
key,
|
key,
|
||||||
restoreKeys: [restoreKey]
|
restoreKeys: [restoreKey]
|
||||||
});
|
});
|
||||||
@ -268,19 +369,60 @@ test("restore with cache found for restore key", async () => {
|
|||||||
const infoMock = jest.spyOn(core, "info");
|
const infoMock = jest.spyOn(core, "info");
|
||||||
const failedMock = jest.spyOn(core, "setFailed");
|
const failedMock = jest.spyOn(core, "setFailed");
|
||||||
const stateMock = jest.spyOn(core, "saveState");
|
const stateMock = jest.spyOn(core, "saveState");
|
||||||
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
|
||||||
const restoreCacheMock = jest
|
const cacheEntry: ArtifactCacheEntry = {
|
||||||
.spyOn(cache, "restoreCache")
|
cacheKey: restoreKey,
|
||||||
.mockImplementationOnce(() => {
|
scope: "refs/heads/master",
|
||||||
return Promise.resolve(restoreKey);
|
archiveLocation: "www.actionscache.test/download"
|
||||||
|
};
|
||||||
|
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
|
||||||
|
getCacheMock.mockImplementation(() => {
|
||||||
|
return Promise.resolve(cacheEntry);
|
||||||
});
|
});
|
||||||
|
const tempPath = "/foo/bar";
|
||||||
|
|
||||||
|
const createTempDirectoryMock = jest.spyOn(
|
||||||
|
actionUtils,
|
||||||
|
"createTempDirectory"
|
||||||
|
);
|
||||||
|
createTempDirectoryMock.mockImplementation(() => {
|
||||||
|
return Promise.resolve(tempPath);
|
||||||
|
});
|
||||||
|
|
||||||
|
const archivePath = path.join(tempPath, CacheFilename.Zstd);
|
||||||
|
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
|
||||||
|
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
|
||||||
|
|
||||||
|
const fileSize = 142;
|
||||||
|
const getArchiveFileSizeMock = jest
|
||||||
|
.spyOn(actionUtils, "getArchiveFileSize")
|
||||||
|
.mockReturnValue(fileSize);
|
||||||
|
|
||||||
|
const extractTarMock = jest.spyOn(tar, "extractTar");
|
||||||
|
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
|
||||||
|
const compression = CompressionMethod.Zstd;
|
||||||
|
const getCompressionMock = jest
|
||||||
|
.spyOn(actionUtils, "getCompressionMethod")
|
||||||
|
.mockReturnValue(Promise.resolve(compression));
|
||||||
|
|
||||||
await run();
|
await run();
|
||||||
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
|
|
||||||
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
|
|
||||||
|
|
||||||
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
|
||||||
|
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
|
||||||
|
compressionMethod: compression
|
||||||
|
});
|
||||||
|
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
|
||||||
|
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(downloadCacheMock).toHaveBeenCalledWith(
|
||||||
|
cacheEntry.archiveLocation,
|
||||||
|
archivePath
|
||||||
|
);
|
||||||
|
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
|
||||||
|
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
|
||||||
|
|
||||||
|
expect(extractTarMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
|
||||||
|
|
||||||
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
|
||||||
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
|
||||||
|
|
||||||
@ -288,4 +430,5 @@ test("restore with cache found for restore key", async () => {
|
|||||||
`Cache restored from key: ${restoreKey}`
|
`Cache restored from key: ${restoreKey}`
|
||||||
);
|
);
|
||||||
expect(failedMock).toHaveBeenCalledTimes(0);
|
expect(failedMock).toHaveBeenCalledTimes(0);
|
||||||
|
expect(getCompressionMock).toHaveBeenCalledTimes(1);
|
||||||
});
|
});
|
||||||
|
__tests__/save.test.ts
@@ -1,13 +1,22 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as path from "path";

import { Events, Inputs, RefKey } from "../src/constants";
import * as cacheHttpClient from "../src/cacheHttpClient";
import {
    CacheFilename,
    CompressionMethod,
    Events,
    Inputs
} from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/save";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";

jest.mock("@actions/core");
jest.mock("@actions/cache");
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");

beforeAll(() => {
@@ -19,14 +28,6 @@ beforeAll(() => {
        return jest.requireActual("../src/utils/actionUtils").getCacheState();
    });

    jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
        (name, options) => {
            return jest
                .requireActual("../src/utils/actionUtils")
                .getInputAsArray(name, options);
        }
    );

    jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
        (key, cacheResult) => {
            return jest
@@ -39,17 +40,35 @@ beforeAll(() => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.isValidEvent();
    });

    jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.getSupportedEvents();
    });

    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
        async filePaths => {
            return filePaths.map(x => path.resolve(x));
        }
    );

    jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
        return Promise.resolve("/foo/bar");
    });

    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
        const actualUtils = jest.requireActual("../src/utils/actionUtils");
        return actualUtils.getCacheFileName(cm);
    });
});

beforeEach(() => {
    process.env[Events.Key] = Events.Push;
    process.env[RefKey] = "refs/heads/feature-branch";
});

afterEach(() => {
    testUtils.clearInputs();
    delete process.env[Events.Key];
    delete process.env[RefKey];
});

test("save with invalid event outputs warning", async () => {
@@ -57,10 +76,9 @@ test("save with invalid event outputs warning", async () => {
    const failedMock = jest.spyOn(core, "setFailed");
    const invalidEvent = "commit_comment";
    process.env[Events.Key] = invalidEvent;
    delete process.env[RefKey];
    await run();
    expect(logWarningMock).toHaveBeenCalledWith(
        `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
        `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
});
@@ -69,21 +87,25 @@ test("save with no primary key in state outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return "";
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledWith(
        `Error retrieving key from state.`
    );
@@ -96,25 +118,33 @@ test("save with exact match returns early", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = primaryKey;
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: primaryKey,
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");
    const createTarMock = jest.spyOn(tar, "createTar");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(infoMock).toHaveBeenCalledWith(
        `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
    );

    expect(createTarMock).toHaveBeenCalledTimes(0);

    expect(failedMock).toHaveBeenCalledTimes(0);
});

@@ -123,22 +153,25 @@ test("save with missing input outputs warning", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: "Linux-node-",
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });
    const saveCacheMock = jest.spyOn(cache, "saveCache");

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledWith(
        "Input required and not supplied: path"
    );
@@ -151,12 +184,17 @@ test("save with large cache outputs warning", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: "Linux-node-",
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
@@ -164,26 +202,36 @@ test("save with large cache outputs warning", async () => {
        });

    const inputPath = "node_modules";
    const cachePaths = [path.resolve(inputPath)];
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            throw new Error(
                "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
            );
        });
    const createTarMock = jest.spyOn(tar, "createTar");

    const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
    jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
        return cacheSize;
    });
    const compression = CompressionMethod.Gzip;
    const getCompressionMock = jest
        .spyOn(actionUtils, "getCompressionMethod")
        .mockReturnValue(Promise.resolve(compression));

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
    const archiveFolder = "/foo/bar";

    expect(createTarMock).toHaveBeenCalledTimes(1);
    expect(createTarMock).toHaveBeenCalledWith(
        archiveFolder,
        cachePaths,
        compression
    );
    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith(
        "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
    );
    expect(failedMock).toHaveBeenCalledTimes(0);
    expect(getCompressionMock).toHaveBeenCalledTimes(1);
});

test("save with reserve cache failure outputs warning", async () => {
@@ -192,12 +240,17 @@ test("save with reserve cache failure outputs warning", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: "Linux-node-",
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
@@ -207,26 +260,35 @@ test("save with reserve cache failure outputs warning", async () => {
    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .mockImplementationOnce(() => {
            const actualCache = jest.requireActual("@actions/cache");
            const error = new actualCache.ReserveCacheError(
                `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
            );
            throw error;
        });
    const reserveCacheMock = jest
        .spyOn(cacheHttpClient, "reserveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(-1);
        });

    const createTarMock = jest.spyOn(tar, "createTar");
    const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
    const compression = CompressionMethod.Zstd;
    const getCompressionMock = jest
        .spyOn(actionUtils, "getCompressionMethod")
        .mockReturnValue(Promise.resolve(compression));

    await run();

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
        compressionMethod: compression
    });

    expect(infoMock).toHaveBeenCalledWith(
        `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
    );

    expect(createTarMock).toHaveBeenCalledTimes(0);
    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledTimes(0);
    expect(failedMock).toHaveBeenCalledTimes(0);
    expect(getCompressionMock).toHaveBeenCalledTimes(1);
});

test("save with server error outputs warning", async () => {
@@ -234,12 +296,17 @@ test("save with server error outputs warning", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: "Linux-node-",
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
@@ -247,35 +314,70 @@ test("save with server error outputs warning", async () => {
        });

    const inputPath = "node_modules";
    const cachePaths = [path.resolve(inputPath)];
    testUtils.setInput(Inputs.Path, inputPath);

    const cacheId = 4;
    const reserveCacheMock = jest
        .spyOn(cacheHttpClient, "reserveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    const createTarMock = jest.spyOn(tar, "createTar");

    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
        .spyOn(cacheHttpClient, "saveCache")
        .mockImplementationOnce(() => {
            throw new Error("HTTP Error Occurred");
        });
    const compression = CompressionMethod.Zstd;
    const getCompressionMock = jest
        .spyOn(actionUtils, "getCompressionMethod")
        .mockReturnValue(Promise.resolve(compression));

    await run();

    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
        compressionMethod: compression
    });

    const archiveFolder = "/foo/bar";
    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);

    expect(createTarMock).toHaveBeenCalledTimes(1);
    expect(createTarMock).toHaveBeenCalledWith(
        archiveFolder,
        cachePaths,
        compression
    );

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);

    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");

    expect(failedMock).toHaveBeenCalledTimes(0);
    expect(getCompressionMock).toHaveBeenCalledTimes(1);
});

test("save with valid inputs uploads a cache", async () => {
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const savedCacheKey = "Linux-node-";
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: "Linux-node-",
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return savedCacheKey;
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
@@ -283,19 +385,44 @@ test("save with valid inputs uploads a cache", async () => {
    });

    const inputPath = "node_modules";
    const cachePaths = [path.resolve(inputPath)];
    testUtils.setInput(Inputs.Path, inputPath);

    const cacheId = 4;
    const saveCacheMock = jest
        .spyOn(cache, "saveCache")
    const reserveCacheMock = jest
        .spyOn(cacheHttpClient, "reserveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    const createTarMock = jest.spyOn(tar, "createTar");

    const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
    const compression = CompressionMethod.Zstd;
    const getCompressionMock = jest
        .spyOn(actionUtils, "getCompressionMethod")
        .mockReturnValue(Promise.resolve(compression));

    await run();

    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
        compressionMethod: compression
    });

    const archiveFolder = "/foo/bar";
    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);

    expect(createTarMock).toHaveBeenCalledTimes(1);
    expect(createTarMock).toHaveBeenCalledWith(
        archiveFolder,
        cachePaths,
        compression
    );

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
    expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);

    expect(failedMock).toHaveBeenCalledTimes(0);
    expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
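Read together, the save tests pin down a four-step pipeline: recover the entry and primary key from state, reserve a cache id, tar up the resolved paths, and upload the archive. A condensed, hypothetical version of that control flow (inferred from the mocks, not copied from src/save.ts):

```typescript
// Hypothetical condensation of the save flow the mocks exercise.
import * as path from "path";

interface SaveClient {
    reserveCache(
        key: string,
        options: { compressionMethod: string }
    ): Promise<number>;
    saveCache(cacheId: number, archiveFile: string): Promise<void>;
}

async function saveFlow(
    client: SaveClient,
    primaryKey: string,
    cachePaths: string[],
    compressionMethod: string,
    createTempDirectory: () => Promise<string>,
    createTar: (
        archiveFolder: string,
        paths: string[],
        method: string
    ) => Promise<void>,
    cacheFilename: string
): Promise<void> {
    // A reservation of -1 signals that another job holds the key; the tests
    // then expect no tar to be created and no upload to be attempted.
    const cacheId = await client.reserveCache(primaryKey, {
        compressionMethod
    });
    if (cacheId === -1) {
        return;
    }
    const archiveFolder = await createTempDirectory();
    await createTar(archiveFolder, cachePaths, compressionMethod);
    await client.saveCache(cacheId, path.join(archiveFolder, cacheFilename));
}
```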
204 __tests__/tar.test.ts Normal file
@@ -0,0 +1,204 @@
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";

import { CacheFilename, CompressionMethod } from "../src/constants";
import * as tar from "../src/tar";
import * as utils from "../src/utils/actionUtils";

import fs = require("fs");

jest.mock("@actions/exec");
jest.mock("@actions/io");

const IS_WINDOWS = process.platform === "win32";

function getTempDir(): string {
    return path.join(__dirname, "_temp", "tar");
}

beforeAll(async () => {
    jest.spyOn(io, "which").mockImplementation(tool => {
        return Promise.resolve(tool);
    });

    process.env["GITHUB_WORKSPACE"] = process.cwd();
    await jest.requireActual("@actions/io").rmRF(getTempDir());
});

afterAll(async () => {
    delete process.env["GITHUB_WORKSPACE"];
    await jest.requireActual("@actions/io").rmRF(getTempDir());
});

test("zstd extract tar", async () => {
    const mkdirMock = jest.spyOn(io, "mkdirP");
    const execMock = jest.spyOn(exec, "exec");

    const archivePath = IS_WINDOWS
        ? `${process.env["windir"]}\\fakepath\\cache.tar`
        : "cache.tar";
    const workspace = process.env["GITHUB_WORKSPACE"];

    await tar.extractTar(archivePath, CompressionMethod.Zstd);

    expect(mkdirMock).toHaveBeenCalledWith(workspace);
    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "--use-compress-program",
            "zstd -d --long=30",
            "-xf",
            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
        ],
        { cwd: undefined }
    );
});

test("gzip extract tar", async () => {
    const mkdirMock = jest.spyOn(io, "mkdirP");
    const execMock = jest.spyOn(exec, "exec");
    const archivePath = IS_WINDOWS
        ? `${process.env["windir"]}\\fakepath\\cache.tar`
        : "cache.tar";
    const workspace = process.env["GITHUB_WORKSPACE"];

    await tar.extractTar(archivePath, CompressionMethod.Gzip);

    expect(mkdirMock).toHaveBeenCalledWith(workspace);
    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";
    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "-z",
            "-xf",
            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
        ],
        { cwd: undefined }
    );
});

test("gzip extract GNU tar on windows", async () => {
    if (IS_WINDOWS) {
        jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);

        const isGnuMock = jest
            .spyOn(utils, "useGnuTar")
            .mockReturnValue(Promise.resolve(true));
        const execMock = jest.spyOn(exec, "exec");
        const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
        const workspace = process.env["GITHUB_WORKSPACE"];

        await tar.extractTar(archivePath, CompressionMethod.Gzip);

        expect(isGnuMock).toHaveBeenCalledTimes(1);
        expect(execMock).toHaveBeenCalledTimes(1);
        expect(execMock).toHaveBeenCalledWith(
            `"tar"`,
            [
                "-z",
                "-xf",
                archivePath.replace(/\\/g, "/"),
                "-P",
                "-C",
                workspace?.replace(/\\/g, "/"),
                "--force-local"
            ],
            { cwd: undefined }
        );
    }
});

test("zstd create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Zstd
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "--use-compress-program",
            "zstd -T0 --long=30",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Zstd.replace(/\\/g, "/")
                : CacheFilename.Zstd,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});

test("gzip create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Gzip
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "-z",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Gzip.replace(/\\/g, "/")
                : CacheFilename.Gzip,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});
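These tests fix the exact command lines, which implies a fairly small implementation surface. A rough sketch of what `createTar` presumably does, reconstructed only from the arguments the tests expect (the archive file names here are assumptions, not taken from src/tar.ts):

```typescript
// Hypothetical reconstruction of the createTar invocation these tests expect.
import * as exec from "@actions/exec";
import * as fs from "fs";
import * as path from "path";

async function createTarSketch(
    archiveFolder: string,
    sourceDirectories: string[],
    useZstd: boolean
): Promise<void> {
    // Paths are passed through a manifest file so the command line stays
    // short no matter how many paths are being cached.
    fs.writeFileSync(
        path.join(archiveFolder, "manifest.txt"),
        sourceDirectories.join("\n")
    );
    const archiveName = useZstd ? "cache.tzst" : "cache.tgz"; // assumed names
    const args = [
        ...(useZstd
            ? ["--use-compress-program", "zstd -T0 --long=30"]
            : ["-z"]),
        "-cf",
        archiveName,
        "-P",
        "-C",
        process.env["GITHUB_WORKSPACE"] ?? process.cwd(),
        "--files-from",
        "manifest.txt"
    ];
    await exec.exec(`"tar"`, args, { cwd: archiveFolder });
}
```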
action.yml
@@ -3,7 +3,7 @@ description: 'Cache artifacts like dependencies and build outputs to improve wor
author: 'GitHub'
inputs:
  path:
    description: 'A list of files, directories, and wildcard patterns to cache and restore'
    description: 'A directory to store and save the cache'
    required: true
  key:
    description: 'An explicit key for restoring and saving the cache'
3366 dist/restore/index.js vendored
File diff suppressed because it is too large
3340 dist/save/index.js vendored
File diff suppressed because it is too large
151 examples.md
@@ -2,7 +2,6 @@

- [Examples](#examples)
  - [C# - NuGet](#c---nuget)
  - [D - DUB](#d---dub)
  - [Elixir - Mix](#elixir---mix)
  - [Go - Modules](#go---modules)
  - [Haskell - Cabal](#haskell---cabal)
@@ -35,7 +34,7 @@
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: ~/.nuget/packages
    key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
@@ -44,25 +43,13 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
```

Depending on the environment, huge packages might be pre-installed in the global cache folder.
With `actions/cache@v2` you can now exclude unwanted packages with [exclude pattern](https://github.com/actions/toolkit/tree/master/packages/glob#exclude-patterns)
If you do not want to include them, consider to move the cache folder like below.
```yaml
- uses: actions/cache@v2
  with:
    path: |
      ~/.nuget/packages
      !~/.nuget/packages/unwanted
    key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
    restore-keys: |
      ${{ runner.os }}-nuget-
```

Or you could move the cache folder like below.
>Note: This workflow does not work for projects that require files to be placed in user profile package folder
```yaml
env:
  NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
  - uses: actions/cache@v2
  - uses: actions/cache@v1
    with:
      path: ${{ github.workspace }}/.nuget/packages
      key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
@@ -70,33 +57,9 @@ steps:
        ${{ runner.os }}-nuget-
```

## D - DUB

### POSIX

```yaml
- uses: actions/cache@v2
  with:
    path: ~/.dub
    key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.json') }}
    restore-keys: |
      ${{ runner.os }}-dub-
```

### Windows

```yaml
- uses: actions/cache@v2
  with:
    path: ~\AppData\Local\dub
    key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.json') }}
    restore-keys: |
      ${{ runner.os }}-dub-
```

## Elixir - Mix
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: deps
    key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
@@ -107,7 +70,7 @@ steps:
## Go - Modules

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: ~/go/pkg/mod
    key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
@@ -120,20 +83,27 @@ steps:
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.

```yaml
- name: Cache ~/.cabal/packages, ~/.cabal/store and dist-newstyle
  uses: actions/cache@v2
  with:
    path: |
      ~/.cabal/packages
      ~/.cabal/store
      dist-newstyle
    key: ${{ runner.os }}-${{ matrix.ghc }}
- uses: actions/cache@v1
  name: Cache ~/.cabal/packages
  with:
    path: ~/.cabal/packages
    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
  name: Cache ~/.cabal/store
  with:
    path: ~/.cabal/store
    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
  name: Cache dist-newstyle
  with:
    path: dist-newstyle
    key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
```

## Java - Gradle

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: ~/.gradle/caches
    key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
@@ -144,8 +114,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
## Java - Maven

```yaml
- name: Cache local Maven repository
  uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: ~/.m2/repository
    key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
@@ -157,14 +126,12 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba

For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` on Windows. See https://docs.npmjs.com/cli/cache#cache

If using `npm config` to retrieve the cache directory, ensure you run [actions/setup-node](https://github.com/actions/setup-node) first to ensure your `npm` version is correct.

>Note: It is not recommended to cache `node_modules`, as it can break across Node versions and won't work with `npm ci`

### macOS and Ubuntu

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: ~/.npm
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -179,7 +146,7 @@ If using npm config to retrieve the cache directory, ensure you run [actions/s
    id: npm-cache
    run: |
      echo "::set-output name=dir::$(npm config get cache)"
  - uses: actions/cache@v2
  - uses: actions/cache@v1
    with:
      path: ${{ steps.npm-cache.outputs.dir }}
      key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -194,7 +161,7 @@ If using npm config to retrieve the cache directory, ensure you run [actions/s
    id: npm-cache
    run: |
      echo "::set-output name=dir::$(npm config get cache)"
  - uses: actions/cache@v2
  - uses: actions/cache@v1
    with:
      path: ${{ steps.npm-cache.outputs.dir }}
      key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@@ -204,9 +171,10 @@ If using npm config to retrieve the cache directory, ensure you run [actions/s

## Node - Lerna

>Note this example uses the new multi-paths feature and is only available at `master`
```yaml
- name: restore lerna
  uses: actions/cache@v2
  uses: actions/cache@master
  with:
    path: |
      node_modules
@@ -222,7 +190,7 @@ The yarn cache directory will depend on your operating system and version of `ya
    id: yarn-cache-dir-path
    run: echo "::set-output name=dir::$(yarn cache dir)"

  - uses: actions/cache@v2
  - uses: actions/cache@v1
    id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
    with:
      path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
@@ -236,7 +204,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
```yaml
- name: Restore Cache
  id: restore-cache
  uses: actions/cache@v2
  uses: actions/cache@v1
  with:
    path: _export
    key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
@@ -266,7 +234,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
    id: composer-cache
    run: |
      echo "::set-output name=dir::$(composer config cache-files-dir)"
  - uses: actions/cache@v2
  - uses: actions/cache@v1
    with:
      path: ${{ steps.composer-cache.outputs.dir }}
      key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
@@ -285,7 +253,7 @@ Locations:

### Simple example
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: ~/.cache/pip
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@@ -298,7 +266,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  if: startsWith(runner.os, 'Linux')
  with:
    path: ~/.cache/pip
@@ -306,7 +274,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-pip-

- uses: actions/cache@v2
- uses: actions/cache@v1
  if: startsWith(runner.os, 'macOS')
  with:
    path: ~/Library/Caches/pip
@@ -314,7 +282,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-pip-

- uses: actions/cache@v2
- uses: actions/cache@v1
  if: startsWith(runner.os, 'Windows')
  with:
    path: ~\AppData\Local\pip\Cache
@@ -333,7 +301,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
      echo "::set-output name=dir::$(pip cache dir)"

  - name: pip cache
    uses: actions/cache@v2
    uses: actions/cache@v1
    with:
      path: ${{ steps.pip-cache.outputs.dir }}
      key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@@ -350,7 +318,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
    run: |
      python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"

  - uses: actions/cache@v2
  - uses: actions/cache@v1
    with:
      path: ${{ steps.pip-cache.outputs.dir }}
      key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@@ -369,7 +337,7 @@ Locations:

### Simple example
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: ~/.local/share/renv
    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
@@ -382,7 +350,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  if: startsWith(runner.os, 'Linux')
  with:
    path: ~/.local/share/renv
@@ -390,7 +358,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-renv-

- uses: actions/cache@v2
- uses: actions/cache@v1
  if: startsWith(runner.os, 'macOS')
  with:
    path: ~/Library/Application Support/renv
@@ -398,7 +366,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
    restore-keys: |
      ${{ runner.os }}-renv-

- uses: actions/cache@v2
- uses: actions/cache@v1
  if: startsWith(runner.os, 'Windows')
  with:
    path: ~\AppData\Local\renv
@@ -410,7 +378,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
## Ruby - Bundler

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: vendor/bundle
    key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
@@ -429,31 +397,42 @@ When dependencies are installed later in the workflow, we must specify the same
## Rust - Cargo

```yaml
- uses: actions/cache@v2
  with:
    path: |
      ~/.cargo/registry
      ~/.cargo/git
      target
    key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo registry
  uses: actions/cache@v1
  with:
    path: ~/.cargo/registry
    key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
  uses: actions/cache@v1
  with:
    path: ~/.cargo/git
    key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
  uses: actions/cache@v1
  with:
    path: target
    key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```

## Scala - SBT

```yaml
- name: Cache SBT
  uses: actions/cache@v2
  with:
    path: |
      ~/.ivy2/cache
      ~/.sbt
    key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT ivy cache
  uses: actions/cache@v1
  with:
    path: ~/.ivy2/cache
    key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
  uses: actions/cache@v1
  with:
    path: ~/.sbt
    key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```

## Swift, Objective-C - Carthage

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: Carthage
    key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }}
@@ -464,7 +443,7 @@ When dependencies are installed later in the workflow, we must specify the same
## Swift, Objective-C - CocoaPods

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: Pods
    key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
@@ -475,7 +454,7 @@ When dependencies are installed later in the workflow, we must specify the same
## Swift - Swift Package Manager

```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
  with:
    path: .build
    key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
6185 package-lock.json generated
File diff suppressed because it is too large
package.json
@@ -25,13 +25,16 @@
  "dependencies": {
    "@actions/core": "^1.2.0",
    "@actions/exec": "^1.0.1",
    "@actions/glob": "^0.1.0",
    "@actions/http-client": "^1.0.8",
    "@actions/io": "^1.0.1",
    "@actions/cache": "^0.2.1"
    "uuid": "^3.3.3"
  },
  "devDependencies": {
    "@types/jest": "^24.0.13",
    "@types/nock": "^11.1.0",
    "@types/node": "^12.0.4",
    "@types/uuid": "^3.4.5",
    "@typescript-eslint/eslint-plugin": "^2.7.0",
    "@typescript-eslint/parser": "^2.7.0",
    "@zeit/ncc": "^0.20.5",
420  src/cacheHttpClient.ts  Normal file
@@ -0,0 +1,420 @@
import * as core from "@actions/core";
import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import {
    IHttpClientResponse,
    IRequestOptions,
    ITypedResponse
} from "@actions/http-client/interfaces";
import * as crypto from "crypto";
import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";

import { CompressionMethod, Inputs, SocketTimeout } from "./constants";
import {
    ArtifactCacheEntry,
    CacheOptions,
    CommitCacheRequest,
    ReserveCacheRequest,
    ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";

const versionSalt = "1.0";

function isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    return statusCode >= 200 && statusCode < 300;
}

function isServerErrorStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return true;
    }
    return statusCode >= 500;
}

function isRetryableStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    const retryableStatusCodes = [
        HttpCodes.BadGateway,
        HttpCodes.ServiceUnavailable,
        HttpCodes.GatewayTimeout
    ];
    return retryableStatusCodes.includes(statusCode);
}

function getCacheApiUrl(resource: string): string {
    // Ideally we just use ACTIONS_CACHE_URL
    const baseUrl: string = (
        process.env["ACTIONS_CACHE_URL"] ||
        process.env["ACTIONS_RUNTIME_URL"] ||
        ""
    ).replace("pipelines", "artifactcache");
    if (!baseUrl) {
        throw new Error(
            "Cache Service Url not found, unable to restore cache."
        );
    }

    const url = `${baseUrl}_apis/artifactcache/${resource}`;
    core.debug(`Resource Url: ${url}`);
    return url;
}

function createAcceptHeader(type: string, apiVersion: string): string {
    return `${type};api-version=${apiVersion}`;
}

function getRequestOptions(): IRequestOptions {
    const requestOptions: IRequestOptions = {
        headers: {
            Accept: createAcceptHeader("application/json", "6.0-preview.1")
        }
    };

    return requestOptions;
}

function createHttpClient(): HttpClient {
    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
    const bearerCredentialHandler = new BearerCredentialHandler(token);

    return new HttpClient(
        "actions/cache",
        [bearerCredentialHandler],
        getRequestOptions()
    );
}

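// Note: the cache "version" below is not a release number; it is a hash that
// namespaces entries by the `path` input (and by the compression method when
// zstd is used), so caches written with different settings never collide.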
export function getCacheVersion(compressionMethod?: CompressionMethod): string {
    const components = [core.getInput(Inputs.Path, { required: true })].concat(
        compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : []
    );

    // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);

    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}

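// Generic retry wrapper used by both the JSON and raw-response calls below.
// Semantics as implemented: a result is returned as soon as its status code is
// not a server error (< 500); 502/503/504 responses and thrown errors are
// retried; any other 5xx breaks out immediately. Attempts are made
// back-to-back with no delay between them.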
export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let response: T | undefined = undefined;
    let statusCode: number | undefined = undefined;
    let isRetryable = false;
    let errorMessage = "";
    let attempt = 1;

    while (attempt <= maxAttempts) {
        try {
            response = await method();
            statusCode = getStatusCode(response);

            if (!isServerErrorStatusCode(statusCode)) {
                return response;
            }

            isRetryable = isRetryableStatusCode(statusCode);
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            isRetryable = true;
            errorMessage = error.message;
        }

        core.debug(
            `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
        );

        if (!isRetryable) {
            core.debug(`${name} - Error is not retryable`);
            break;
        }

        attempt++;
    }

    throw Error(`${name} failed: ${errorMessage}`);
}

export async function retryTypedResponse<T>(
    name: string,
    method: () => Promise<ITypedResponse<T>>,
    maxAttempts = 2
): Promise<ITypedResponse<T>> {
    return await retry(
        name,
        method,
        (response: ITypedResponse<T>) => response.statusCode,
        maxAttempts
    );
}

export async function retryHttpClientResponse<T>(
    name: string,
    method: () => Promise<IHttpClientResponse>,
    maxAttempts = 2
): Promise<IHttpClientResponse> {
    return await retry(
        name,
        method,
        (response: IHttpClientResponse) => response.message.statusCode,
        maxAttempts
    );
}

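// Queries the cache service for the given keys. The service answers 204 when
// no entry matches (mapped to null here) and 200 with an archiveLocation on a
// hit; the signed download URL is registered as a secret so it never appears
// in logs.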
export async function getCacheEntry(
    keys: string[],
    options?: CacheOptions
): Promise<ArtifactCacheEntry | null> {
    const httpClient = createHttpClient();
    const version = getCacheVersion(options?.compressionMethod);
    const resource = `cache?keys=${encodeURIComponent(
        keys.join(",")
    )}&version=${version}`;

    const response = await retryTypedResponse("getCacheEntry", () =>
        httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
    );

    if (response.statusCode === 204) {
        return null;
    }

    const cacheResult = response.result;
    const cacheDownloadUrl = cacheResult?.archiveLocation;
    if (!cacheDownloadUrl) {
        throw new Error("Cache not found.");
    }
    core.setSecret(cacheDownloadUrl);
    core.debug(`Cache Result:`);
    core.debug(JSON.stringify(cacheResult));

    return cacheResult;
}

async function pipeResponseToStream(
    response: IHttpClientResponse,
    output: NodeJS.WritableStream
): Promise<void> {
    const pipeline = util.promisify(stream.pipeline);
    await pipeline(response.message, output);
}

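// Streams the archive to disk. Two failure guards: a socket-level inactivity
// timeout (SocketTimeout) that destroys stalled downloads, and a post-download
// comparison of the Content-Length header against the bytes actually written.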
export async function downloadCache(
    archiveLocation: string,
    archivePath: string
): Promise<void> {
    const stream = fs.createWriteStream(archivePath);
    const httpClient = new HttpClient("actions/cache");
    const downloadResponse = await retryHttpClientResponse(
        "downloadCache",
        () => httpClient.get(archiveLocation)
    );

    // Abort download if no traffic received over the socket.
    downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
        downloadResponse.message.destroy();
        core.debug(
            `Aborting download, socket timed out after ${SocketTimeout} ms`
        );
    });

    await pipeResponseToStream(downloadResponse, stream);

    // Validate download size.
    const contentLengthHeader =
        downloadResponse.message.headers["content-length"];

    if (contentLengthHeader) {
        const expectedLength = parseInt(contentLengthHeader);
        const actualLength = utils.getArchiveFileSize(archivePath);

        if (actualLength != expectedLength) {
            throw new Error(
                `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
            );
        }
    } else {
        core.debug("Unable to validate download, no Content-Length header");
    }
}

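// Save protocol: reserveCache() first claims a cacheId for the key/version;
// the archive is then PATCHed to that id in chunks and finally sealed with
// commitCache(). A cacheId of -1 means another job already holds the
// reservation for this key.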
// Reserve Cache
export async function reserveCache(
    key: string,
    options?: CacheOptions
): Promise<number> {
    const httpClient = createHttpClient();
    const version = getCacheVersion(options?.compressionMethod);

    const reserveCacheRequest: ReserveCacheRequest = {
        key,
        version
    };
    const response = await retryTypedResponse("reserveCache", () =>
        httpClient.postJson<ReserveCacheResponse>(
            getCacheApiUrl("caches"),
            reserveCacheRequest
        )
    );

    return response?.result?.cacheId ?? -1;
}

function getContentRange(start: number, end: number): string {
    // Format: `bytes start-end/filesize
    // start and end are inclusive
    // filesize can be *
    // For a 200 byte chunk starting at byte 0:
    // Content-Range: bytes 0-199/*
    return `bytes ${start}-${end}/*`;
}

async function uploadChunk(
    httpClient: HttpClient,
    resourceUrl: string,
    openStream: () => NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<void> {
    core.debug(
        `Uploading chunk of size ${end -
            start +
            1} bytes at offset ${start} with content range: ${getContentRange(
            start,
            end
        )}`
    );
    const additionalHeaders = {
        "Content-Type": "application/octet-stream",
        "Content-Range": getContentRange(start, end)
    };

    const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
        return await httpClient.sendStream(
            "PATCH",
            resourceUrl,
            openStream(),
            additionalHeaders
        );
    };

    await retryHttpClientResponse(
        `uploadChunk (start: ${start}, end: ${end})`,
        uploadChunkRequest
    );
}

function parseEnvNumber(key: string): number | undefined {
    const value = Number(process.env[key]);
    if (Number.isNaN(value) || value < 0) {
        return undefined;
    }
    return value;
}

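// Uploads the archive with `concurrency` parallel workers that all pull from a
// single shared `offset` cursor. Because Node is single-threaded, each worker
// claims its chunk range synchronously (before awaiting the PATCH), so ranges
// never overlap even though the uploads themselves run concurrently.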
async function uploadFile(
    httpClient: HttpClient,
    cacheId: number,
    archivePath: string
): Promise<void> {
    // Upload Chunks
    const fileSize = fs.statSync(archivePath).size;
    const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
    const fd = fs.openSync(archivePath, "r");

    const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
    const MAX_CHUNK_SIZE =
        parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);

    const parallelUploads = [...new Array(concurrency).keys()];
    core.debug("Awaiting all uploads");
    let offset = 0;

    try {
        await Promise.all(
            parallelUploads.map(async () => {
                while (offset < fileSize) {
                    const chunkSize = Math.min(
                        fileSize - offset,
                        MAX_CHUNK_SIZE
                    );
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    offset += MAX_CHUNK_SIZE;

                    await uploadChunk(
                        httpClient,
                        resourceUrl,
                        () =>
                            fs.createReadStream(archivePath, {
                                fd,
                                start,
                                end,
                                autoClose: false
                            }),
                        start,
                        end
                    );
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
    return;
}

async function commitCache(
    httpClient: HttpClient,
    cacheId: number,
    filesize: number
): Promise<ITypedResponse<null>> {
    const commitCacheRequest: CommitCacheRequest = { size: filesize };
    return await retryTypedResponse("commitCache", () =>
        httpClient.postJson<null>(
            getCacheApiUrl(`caches/${cacheId.toString()}`),
            commitCacheRequest
        )
    );
}

export async function saveCache(
    cacheId: number,
    archivePath: string
): Promise<void> {
    const httpClient = createHttpClient();

    core.debug("Upload cache");
    await uploadFile(httpClient, cacheId, archivePath);

    // Commit Cache
    core.debug("Committing cache");
    const cacheSize = utils.getArchiveFileSize(archivePath);
    const commitCacheResponse = await commitCache(
        httpClient,
        cacheId,
        cacheSize
    );
    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
        throw new Error(
            `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
        );
    }

    core.info("Cache saved successfully");
}
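
For orientation, here is a minimal sketch (not part of the diff) of how the exported retry helpers above compose around a typed service call. The `PingResponse` shape, the `pingCacheService` name, and the endpoint URL are illustrative assumptions only:

```typescript
import { HttpClient } from "@actions/http-client";

import { retryTypedResponse } from "./cacheHttpClient";

// Illustrative only: a hypothetical typed endpoint to show the wrapping pattern.
interface PingResponse {
    ok: boolean;
}

async function pingCacheService(url: string): Promise<PingResponse | null> {
    const httpClient = new HttpClient("actions/cache");
    // retryTypedResponse re-invokes the lambda on 502/503/504 or a thrown
    // error (up to 2 attempts by default) and rethrows anything else.
    const response = await retryTypedResponse("ping", () =>
        httpClient.getJson<PingResponse>(url)
    );
    return response.result;
}
```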
src/constants.ts
@@ -9,8 +9,8 @@ export enum Outputs {
 }

 export enum State {
-    CachePrimaryKey = "CACHE_KEY",
-    CacheMatchedKey = "CACHE_RESULT"
+    CacheKey = "CACHE_KEY",
+    CacheResult = "CACHE_RESULT"
 }

 export enum Events {
@@ -19,4 +19,17 @@ export enum Events {
     PullRequest = "pull_request"
 }

-export const RefKey = "GITHUB_REF";
+export enum CacheFilename {
+    Gzip = "cache.tgz",
+    Zstd = "cache.tzst"
+}
+
+export enum CompressionMethod {
+    Gzip = "gzip",
+    Zstd = "zstd"
+}
+
+// Socket timeout in milliseconds during download. If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+export const SocketTimeout = 5000;
25  src/contracts.d.ts  vendored  Normal file
@@ -0,0 +1,25 @@
import { CompressionMethod } from "./constants";

export interface ArtifactCacheEntry {
    cacheKey?: string;
    scope?: string;
    creationTime?: string;
    archiveLocation?: string;
}

export interface CommitCacheRequest {
    size: number;
}

export interface ReserveCacheRequest {
    key: string;
    version?: string;
}

export interface ReserveCacheResponse {
    cacheId: number;
}

export interface CacheOptions {
    compressionMethod?: CompressionMethod;
}
110  src/restore.ts
@@ -1,7 +1,9 @@
-import * as cache from "@actions/cache";
 import * as core from "@actions/core";
+import * as path from "path";
+
+import * as cacheHttpClient from "./cacheHttpClient";
 import { Events, Inputs, State } from "./constants";
+import { extractTar } from "./tar";
 import * as utils from "./utils/actionUtils";

 async function run(): Promise<void> {
@@ -11,50 +13,104 @@ async function run(): Promise<void> {
             utils.logWarning(
                 `Event Validation Error: The event type ${
                     process.env[Events.Key]
-                } is not supported because it's not tied to a branch or tag ref.`
+                } is not supported. Only ${utils
+                    .getSupportedEvents()
+                    .join(", ")} events are supported at this time.`
             );
             return;
         }

         const primaryKey = core.getInput(Inputs.Key, { required: true });
-        core.saveState(State.CachePrimaryKey, primaryKey);
+        core.saveState(State.CacheKey, primaryKey);

-        const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
-        const cachePaths = utils.getInputAsArray(Inputs.Path, {
-            required: true
-        });
+        const restoreKeys = core
+            .getInput(Inputs.RestoreKeys)
+            .split("\n")
+            .filter(x => x !== "");
+        const keys = [primaryKey, ...restoreKeys];

-        try {
-            const cacheKey = await cache.restoreCache(
-                cachePaths,
-                primaryKey,
-                restoreKeys
-            );
-            if (!cacheKey) {
-                core.info(
-                    `Cache not found for input keys: ${[
-                        primaryKey,
-                        ...restoreKeys
-                    ].join(", ")}`
-                );
-                return;
-            }
+        core.debug("Resolved Keys:");
+        core.debug(JSON.stringify(keys));
+
+        if (keys.length > 10) {
+            core.setFailed(
+                `Key Validation Error: Keys are limited to a maximum of 10.`
+            );
+            return;
+        }
+        for (const key of keys) {
+            if (key.length > 512) {
+                core.setFailed(
+                    `Key Validation Error: ${key} cannot be larger than 512 characters.`
+                );
+                return;
+            }
+            const regex = /^[^,]*$/;
+            if (!regex.test(key)) {
+                core.setFailed(
+                    `Key Validation Error: ${key} cannot contain commas.`
+                );
+                return;
+            }
+        }

-        // Store the matched cache key
-        utils.setCacheState(cacheKey);
+        const compressionMethod = await utils.getCompressionMethod();

-        const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
+        try {
+            const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
+                compressionMethod: compressionMethod
+            });
+            if (!cacheEntry?.archiveLocation) {
+                core.info(`Cache not found for input keys: ${keys.join(", ")}`);
+                return;
+            }
+
+            const archivePath = path.join(
+                await utils.createTempDirectory(),
+                utils.getCacheFileName(compressionMethod)
+            );
+            core.debug(`Archive Path: ${archivePath}`);
+
+            // Store the cache result
+            utils.setCacheState(cacheEntry);
+
+            try {
+                // Download the cache from the cache entry
+                await cacheHttpClient.downloadCache(
+                    cacheEntry.archiveLocation,
+                    archivePath
+                );
+
+                const archiveFileSize = utils.getArchiveFileSize(archivePath);
+                core.info(
+                    `Cache Size: ~${Math.round(
+                        archiveFileSize / (1024 * 1024)
+                    )} MB (${archiveFileSize} B)`
+                );
+
+                await extractTar(archivePath, compressionMethod);
+            } finally {
+                // Try to delete the archive to save space
+                try {
+                    await utils.unlinkFile(archivePath);
+                } catch (error) {
+                    core.debug(`Failed to delete archive: ${error}`);
+                }
+            }
+
+            const isExactKeyMatch = utils.isExactKeyMatch(
+                primaryKey,
+                cacheEntry
+            );
             utils.setCacheHitOutput(isExactKeyMatch);

-            core.info(`Cache restored from key: ${cacheKey}`);
+            core.info(
+                `Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`
+            );
         } catch (error) {
-            if (error.name === cache.ValidationError.name) {
-                throw error;
-            } else {
-                utils.logWarning(error.message);
-                utils.setCacheHitOutput(false);
-            }
+            utils.logWarning(error.message);
+            utils.setCacheHitOutput(false);
         }
     } catch (error) {
         core.setFailed(error.message);
     }
66  src/save.ts
@@ -1,7 +1,9 @@
-import * as cache from "@actions/cache";
 import * as core from "@actions/core";
+import * as path from "path";
+
+import * as cacheHttpClient from "./cacheHttpClient";
 import { Events, Inputs, State } from "./constants";
+import { createTar } from "./tar";
 import * as utils from "./utils/actionUtils";

 async function run(): Promise<void> {
@@ -10,7 +12,9 @@ async function run(): Promise<void> {
             utils.logWarning(
                 `Event Validation Error: The event type ${
                     process.env[Events.Key]
-                } is not supported because it's not tied to a branch or tag ref.`
+                } is not supported. Only ${utils
+                    .getSupportedEvents()
+                    .join(", ")} events are supported at this time.`
             );
             return;
         }
@@ -18,7 +22,7 @@ async function run(): Promise<void> {
         const state = utils.getCacheState();

         // Inputs are re-evaluated before the post action, so we want the original key used for restore
-        const primaryKey = core.getState(State.CachePrimaryKey);
+        const primaryKey = core.getState(State.CacheKey);
         if (!primaryKey) {
             utils.logWarning(`Error retrieving key from state.`);
             return;
@@ -31,21 +35,53 @@ async function run(): Promise<void> {
             return;
         }

-        const cachePaths = utils.getInputAsArray(Inputs.Path, {
-            required: true
-        });
+        const compressionMethod = await utils.getCompressionMethod();

-        try {
-            await cache.saveCache(cachePaths, primaryKey);
-        } catch (error) {
-            if (error.name === cache.ValidationError.name) {
-                throw error;
-            } else if (error.name === cache.ReserveCacheError.name) {
-                core.info(error.message);
-            } else {
-                utils.logWarning(error.message);
-            }
-        }
+        core.debug("Reserving Cache");
+        const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
+            compressionMethod: compressionMethod
+        });
+        if (cacheId == -1) {
+            core.info(
+                `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
+            );
+            return;
+        }
+        core.debug(`Cache ID: ${cacheId}`);
+        const cachePaths = await utils.resolvePaths(
+            core
+                .getInput(Inputs.Path, { required: true })
+                .split("\n")
+                .filter(x => x !== "")
+        );
+
+        core.debug("Cache Paths:");
+        core.debug(`${JSON.stringify(cachePaths)}`);
+
+        const archiveFolder = await utils.createTempDirectory();
+        const archivePath = path.join(
+            archiveFolder,
+            utils.getCacheFileName(compressionMethod)
+        );
+
+        core.debug(`Archive Path: ${archivePath}`);
+
+        await createTar(archiveFolder, cachePaths, compressionMethod);
+
+        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
+        const archiveFileSize = utils.getArchiveFileSize(archivePath);
+        core.debug(`File Size: ${archiveFileSize}`);
+        if (archiveFileSize > fileSizeLimit) {
+            utils.logWarning(
+                `Cache size of ~${Math.round(
+                    archiveFileSize / (1024 * 1024)
+                )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
+            );
+            return;
+        }
+
+        core.debug(`Saving Cache (ID: ${cacheId})`);
+        await cacheHttpClient.saveCache(cacheId, archivePath);
     } catch (error) {
         utils.logWarning(error.message);
     }
87  src/tar.ts  Normal file
@@ -0,0 +1,87 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync, writeFileSync } from "fs";
import * as path from "path";

import { CompressionMethod } from "./constants";
import * as utils from "./utils/actionUtils";

async function getTarPath(args: string[]): Promise<string> {
    // Explicitly use BSD Tar on Windows
    const IS_WINDOWS = process.platform === "win32";
    if (IS_WINDOWS) {
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (existsSync(systemTar)) {
            return systemTar;
        } else if (await utils.useGnuTar()) {
            args.push("--force-local");
        }
    }
    return await io.which("tar", true);
}

async function execTar(args: string[], cwd?: string): Promise<void> {
    try {
        await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
    } catch (error) {
        throw new Error(`Tar failed with error: ${error?.message}`);
    }
}

function getWorkingDirectory(): string {
    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}

export async function extractTar(
    archivePath: string,
    compressionMethod: CompressionMethod
): Promise<void> {
    // Create directory to extract tar into
    const workingDirectory = getWorkingDirectory();
    await io.mkdirP(workingDirectory);
    // --d: Decompress.
    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
    // Using 30 here because we also support 32-bit self-hosted runners.
    const args = [
        ...(compressionMethod == CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -d --long=30"]
            : ["-z"]),
        "-xf",
        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
    ];
    await execTar(args);
}

export async function createTar(
    archiveFolder: string,
    sourceDirectories: string[],
    compressionMethod: CompressionMethod
): Promise<void> {
    // Write source directories to manifest.txt to avoid command length limits
    const manifestFilename = "manifest.txt";
    const cacheFileName = utils.getCacheFileName(compressionMethod);
    writeFileSync(
        path.join(archiveFolder, manifestFilename),
        sourceDirectories.join("\n")
    );
    // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
    // Using 30 here because we also support 32-bit self-hosted runners.
    const workingDirectory = getWorkingDirectory();
    const args = [
        ...(compressionMethod == CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -T0 --long=30"]
            : ["-z"]),
        "-cf",
        cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "--files-from",
        manifestFilename
    ];
    await execTar(args, archiveFolder);
}
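
(For reference: `--long=30` gives zstd a 2^30-byte, i.e. roughly 1 GiB, match window, which is the largest setting that still works on the 32-bit self-hosted runners the comments above mention.)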
src/utils/actionUtils.ts
@@ -1,35 +1,87 @@
 import * as core from "@actions/core";
+import * as exec from "@actions/exec";
+import * as glob from "@actions/glob";
+import * as io from "@actions/io";
+import * as fs from "fs";
+import * as os from "os";
+import * as path from "path";
+import * as util from "util";
+import * as uuidV4 from "uuid/v4";

-import { Outputs, RefKey, State } from "../constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Outputs,
+    State
+} from "../constants";
+import { ArtifactCacheEntry } from "../contracts";

-export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
+// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
+export async function createTempDirectory(): Promise<string> {
+    const IS_WINDOWS = process.platform === "win32";
+
+    let tempDirectory: string = process.env["RUNNER_TEMP"] || "";
+
+    if (!tempDirectory) {
+        let baseLocation: string;
+        if (IS_WINDOWS) {
+            // On Windows use the USERPROFILE env variable
+            baseLocation = process.env["USERPROFILE"] || "C:\\";
+        } else {
+            if (process.platform === "darwin") {
+                baseLocation = "/Users";
+            } else {
+                baseLocation = "/home";
+            }
+        }
+        tempDirectory = path.join(baseLocation, "actions", "temp");
+    }
+
+    const dest = path.join(tempDirectory, uuidV4.default());
+    await io.mkdirP(dest);
+    return dest;
+}
+
+export function getArchiveFileSize(path: string): number {
+    return fs.statSync(path).size;
+}
+
+export function isExactKeyMatch(
+    key: string,
+    cacheResult?: ArtifactCacheEntry
+): boolean {
     return !!(
-        cacheKey &&
-        cacheKey.localeCompare(key, undefined, {
+        cacheResult &&
+        cacheResult.cacheKey &&
+        cacheResult.cacheKey.localeCompare(key, undefined, {
             sensitivity: "accent"
         }) === 0
     );
 }

-export function setCacheState(state: string): void {
-    core.saveState(State.CacheMatchedKey, state);
+export function setCacheState(state: ArtifactCacheEntry): void {
+    core.saveState(State.CacheResult, JSON.stringify(state));
 }

 export function setCacheHitOutput(isCacheHit: boolean): void {
     core.setOutput(Outputs.CacheHit, isCacheHit.toString());
 }

-export function setOutputAndState(key: string, cacheKey?: string): void {
-    setCacheHitOutput(isExactKeyMatch(key, cacheKey));
-    // Store the matched cache key if it exists
-    cacheKey && setCacheState(cacheKey);
+export function setOutputAndState(
+    key: string,
+    cacheResult?: ArtifactCacheEntry
+): void {
+    setCacheHitOutput(isExactKeyMatch(key, cacheResult));
+    // Store the cache result if it exists
+    cacheResult && setCacheState(cacheResult);
 }

-export function getCacheState(): string | undefined {
-    const cacheKey = core.getState(State.CacheMatchedKey);
-    if (cacheKey) {
-        core.debug(`Cache state/key: ${cacheKey}`);
-        return cacheKey;
+export function getCacheState(): ArtifactCacheEntry | undefined {
+    const stateData = core.getState(State.CacheResult);
+    core.debug(`State: ${stateData}`);
+    if (stateData) {
+        return JSON.parse(stateData) as ArtifactCacheEntry;
     }

     return undefined;
@@ -40,19 +92,81 @@ export function logWarning(message: string): void {
     core.info(`${warningPrefix}${message}`);
 }

-// Cache token authorized for all events that are tied to a ref
-// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
-export function isValidEvent(): boolean {
-    return RefKey in process.env && Boolean(process.env[RefKey]);
+export async function resolvePaths(patterns: string[]): Promise<string[]> {
+    const paths: string[] = [];
+    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
+    const globber = await glob.create(patterns.join("\n"), {
+        implicitDescendants: false
+    });
+
+    for await (const file of globber.globGenerator()) {
+        const relativeFile = path.relative(workspace, file);
+        core.debug(`Matched: ${relativeFile}`);
+        // Paths are made relative so the tar entries are all relative to the root of the workspace.
+        paths.push(`${relativeFile}`);
+    }
+
+    return paths;
 }

-export function getInputAsArray(
-    name: string,
-    options?: core.InputOptions
-): string[] {
-    return core
-        .getInput(name, options)
-        .split("\n")
-        .map(s => s.trim())
-        .filter(x => x !== "");
+export function getSupportedEvents(): string[] {
+    return [Events.Push, Events.PullRequest];
+}
+
+// Currently the cache token is only authorized for push and pull_request events
+// All other events will fail when reading and saving the cache
+// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
+export function isValidEvent(): boolean {
+    const githubEvent = process.env[Events.Key] || "";
+    return getSupportedEvents().includes(githubEvent);
+}
+
+export function unlinkFile(path: fs.PathLike): Promise<void> {
+    return util.promisify(fs.unlink)(path);
+}
+
+async function getVersion(app: string): Promise<string> {
+    core.debug(`Checking ${app} --version`);
+    let versionOutput = "";
+    try {
+        await exec.exec(`${app} --version`, [], {
+            ignoreReturnCode: true,
+            silent: true,
+            listeners: {
+                stdout: (data: Buffer): string =>
+                    (versionOutput += data.toString()),
+                stderr: (data: Buffer): string =>
+                    (versionOutput += data.toString())
+            }
+        });
+    } catch (err) {
+        core.debug(err.message);
+    }
+
+    versionOutput = versionOutput.trim();
+    core.debug(versionOutput);
+    return versionOutput;
+}
+
+export async function getCompressionMethod(): Promise<CompressionMethod> {
+    // Disabling zstd on Windows due to https://github.com/actions/cache/issues/301
+    if (os.platform() === "win32") {
+        return CompressionMethod.Gzip;
+    }
+
+    const versionOutput = await getVersion("zstd");
+    return versionOutput.toLowerCase().includes("zstd command line interface")
+        ? CompressionMethod.Zstd
+        : CompressionMethod.Gzip;
+}
+
+export function getCacheFileName(compressionMethod: CompressionMethod): string {
+    return compressionMethod == CompressionMethod.Zstd
+        ? CacheFilename.Zstd
+        : CacheFilename.Gzip;
+}
+
+export async function useGnuTar(): Promise<boolean> {
+    const versionOutput = await getVersion("tar");
+    return versionOutput.toLowerCase().includes("gnu tar");
 }