Compare commits

..

5 Commits

Author SHA1 Message Date
c8d75a8073 Use retries on all API calls 2020-05-10 10:36:06 -04:00
a8b61326cf Disable zstd on Windows due to issue #301 2020-05-08 16:09:03 -04:00
25b1a139de Revert "Test disabling concurrency" 2020-05-08 15:59:00 -04:00
    This reverts commit 6efe05572d.
6efe05572d Test disabling concurrency 2020-05-08 12:05:32 -04:00
aced43a650 Fix uploadChunk and add generic retry method 2020-05-08 11:37:53 -04:00
20 changed files with 8006 additions and 7267 deletions

View File

@@ -4,13 +4,11 @@ on:
pull_request:
branches:
- master
- releases/**
paths-ignore:
- '**.md'
push:
branches:
- master
- releases/**
paths-ignore:
- '**.md'
@@ -19,7 +17,7 @@ jobs:
build:
strategy:
matrix:
os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
@@ -47,23 +45,12 @@ jobs:
run: npm run lint
- name: Build & Test
run: npm run test
- name: Ensure dist/ folder is up-to-date
if: ${{ runner.os == 'Linux' }}
shell: bash
run: |
npm run build
if [ "$(git status --porcelain | wc -l)" -gt "0" ]; then
echo "Detected uncommitted changes after build. See status below:"
git diff
exit 1
fi
# End to end save and restore
test-save:
strategy:
matrix:
os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
@@ -86,7 +73,7 @@ jobs:
needs: test-save
strategy:
matrix:
os: [ubuntu-latest, ubuntu-16.04, windows-latest, macOS-latest]
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:

View File

@@ -8,28 +8,6 @@ This action allows caching dependencies and build outputs to improve workflow ex
See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
## What's New
* Added support for multiple paths, [glob patterns](https://github.com/actions/toolkit/tree/master/packages/glob), and single file caches.
```yaml
- name: Cache multiple paths
uses: actions/cache@v2
with:
path: |
~/cache
!~/cache/exclude
**/node_modules
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
```
* Increased performance and improved cache sizes using `zstd` compression for Linux and macOS runners
* Allowed caching for all events with a ref. See [events that trigger workflows](https://help.github.com/en/actions/reference/events-that-trigger-workflows) for info on which events do not have a `GITHUB_REF`
* Released the [`@actions/cache`](https://github.com/actions/toolkit/tree/master/packages/cache) npm package to allow other actions to utilize caching
* Added a best-effort cleanup step to delete the archive after extraction to reduce storage space
Refer [here](https://github.com/actions/cache/blob/v1/README.md) for previous versions
## Usage
### Pre-requisites
@@ -37,7 +15,7 @@ Create a workflow `.yml` file in your repository's `.github/workflows` directory
### Inputs
* `path` - A list of files, directories, and wildcard patterns to cache and restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/master/packages/glob) for supported patterns.
* `path` - A directory to store and save the cache
* `key` - An explicit key for restoring and saving the cache
* `restore-keys` - An ordered list of keys to use for restoring the cache if no cache hit occurred for key
@@ -47,11 +25,6 @@ Create a workflow `.yml` file in your repository's `.github/workflows` directory
> See [Skipping steps based on cache-hit](#Skipping-steps-based-on-cache-hit) for info on using this output
### Cache scopes
The cache is scoped to the key and branch. The default branch cache is available to other branches.
See [Matching a cache key](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key) for more info.
### Example workflow
```yaml
@@ -68,7 +41,7 @@ jobs:
- name: Cache Primes
id: cache-primes
uses: actions/cache@v2
uses: actions/cache@v1
with:
path: prime-numbers
key: ${{ runner.os }}-primes
@@ -88,7 +61,6 @@ Every programming language and framework has its own way of caching.
See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
- [C# - Nuget](./examples.md#c---nuget)
- [D - DUB](./examples.md#d---dub)
- [Elixir - Mix](./examples.md#elixir---mix)
- [Go - Modules](./examples.md#go---modules)
- [Haskell - Cabal](./examples.md#haskell---cabal)
@@ -108,39 +80,6 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)
## Creating a cache key
A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions.
For example, using the [`hashFiles`](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#hashfiles) function allows you to create a new cache when dependencies change.
```yaml
- uses: actions/cache@v2
with:
path: |
path/to/dependencies
some/other/dependencies
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
```
Additionally, you can use arbitrary command output in a cache key, such as a date or software version:
```yaml
# http://man7.org/linux/man-pages/man1/date.1.html
- name: Get Date
id: get-date
run: |
echo "::set-output name=date::$(/bin/date -u "+%Y%m%d")"
shell: bash
- uses: actions/cache@v2
with:
path: path/to/dependencies
key: ${{ runner.os }}-${{ steps.get-date.outputs.date }}-${{ hashFiles('**/lockfiles') }}
```
See [Using contexts to create cache keys](https://help.github.com/en/actions/configuring-and-managing-workflows/caching-dependencies-to-speed-up-workflows#using-contexts-to-create-cache-keys)
## Cache Limits
A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
@@ -154,7 +93,7 @@ Example:
steps:
- uses: actions/checkout@v2
- uses: actions/cache@v2
- uses: actions/cache@v1
id: cache
with:
path: path/to/dependencies
@@ -168,7 +107,7 @@ steps:
> Note: The `id` defined in `actions/cache` must match the `id` in the `if` statement (i.e. `steps.[ID].outputs.cache-hit`)
## Contributing
We would love for you to contribute to `actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.
We would love for you to contribute to `@actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.
## License
The scripts and documentation in this project are released under the [MIT License](LICENSE)

View File

@@ -1,72 +1,97 @@
import * as core from "@actions/core";
import * as io from "@actions/io";
import { promises as fs } from "fs";
import * as os from "os";
import * as path from "path";
import { Events, Outputs, RefKey, State } from "../src/constants";
import { Events, Outputs, State } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
import uuid = require("uuid");
jest.mock("@actions/core");
jest.mock("os");
beforeAll(() => {
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
return jest.requireActual("@actions/core").getInput(name, options);
});
});
function getTempDir(): string {
return path.join(__dirname, "_temp", "actionUtils");
}
afterEach(() => {
delete process.env[Events.Key];
delete process.env[RefKey];
});
test("isExactKeyMatch with undefined cache key returns false", () => {
const key = "linux-rust";
const cacheKey = undefined;
expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
afterAll(async () => {
delete process.env["GITHUB_WORKSPACE"];
await io.rmRF(getTempDir());
});
test("isExactKeyMatch with empty cache key returns false", () => {
const key = "linux-rust";
const cacheKey = "";
test("getArchiveFileSize returns file size", () => {
const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
const size = actionUtils.getArchiveFileSize(filePath);
expect(size).toBe(11);
});
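For reference, the helper under test can be a single stat call. A minimal sketch (an editor's illustration, not the diffed source), assuming a synchronous implementation since the test calls it without `await`:

```ts
import * as fs from "fs";

// Size in bytes of the archive on disk ("helloWorld.txt" above is 11 bytes).
export function getArchiveFileSize(filePath: string): number {
    return fs.statSync(filePath).size;
}
```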
test("isExactKeyMatch with undefined cache entry returns false", () => {
const key = "linux-rust";
const cacheEntry = undefined;
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with empty cache entry returns false", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with different keys returns false", () => {
const key = "linux-rust";
const cacheKey = "linux-";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-"
};
expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with different key accents returns false", () => {
const key = "linux-áccent";
const cacheKey = "linux-accent";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-accent"
};
expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(false);
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with same key returns true", () => {
const key = "linux-rust";
const cacheKey = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust"
};
expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true);
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
});
test("isExactKeyMatch with same key and different casing returns true", () => {
const key = "linux-rust";
const cacheKey = "LINUX-RUST";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "LINUX-RUST"
};
expect(actionUtils.isExactKeyMatch(key, cacheKey)).toBe(true);
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
});
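The cases above pin down the comparison contract: missing or empty entries never match, casing is ignored, accents are not. A minimal sketch consistent with that contract, assuming `localeCompare` with accent sensitivity (any case-insensitive, accent-sensitive comparison would pass these tests):

```ts
import { ArtifactCacheEntry } from "../src/contracts";

export function isExactKeyMatch(
    key: string,
    cacheResult?: ArtifactCacheEntry
): boolean {
    return !!(
        cacheResult &&
        cacheResult.cacheKey &&
        // "accent" sensitivity ignores case ("LINUX-RUST" === "linux-rust")
        // but distinguishes accents ("linux-áccent" !== "linux-accent").
        cacheResult.cacheKey.localeCompare(key, undefined, {
            sensitivity: "accent"
        }) === 0
    );
}
```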
test("setOutputAndState with undefined entry to set cache-hit output", () => {
const key = "linux-rust";
const cacheKey = undefined;
const cacheEntry = undefined;
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheKey);
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
expect(setOutputMock).toHaveBeenCalledTimes(1);
@@ -76,33 +101,43 @@ test("setOutputAndState with undefined entry to set cache-hit output", () => {
test("setOutputAndState with exact match to set cache-hit output and state", () => {
const key = "linux-rust";
const cacheKey = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust"
};
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheKey);
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
expect(saveStateMock).toHaveBeenCalledWith(
State.CacheResult,
JSON.stringify(cacheEntry)
);
expect(saveStateMock).toHaveBeenCalledTimes(1);
});
test("setOutputAndState with no exact match to set cache-hit output and state", () => {
const key = "linux-rust";
const cacheKey = "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43"
};
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheKey);
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledWith(State.CacheMatchedKey, cacheKey);
expect(saveStateMock).toHaveBeenCalledWith(
State.CacheResult,
JSON.stringify(cacheEntry)
);
expect(saveStateMock).toHaveBeenCalledTimes(1);
});
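Both variants assert exactly one `setOutput` call plus, when an entry exists, one `saveState` call with the serialized entry. A sketch consistent with those expectations, reusing `core`, `Outputs`, `State`, and `isExactKeyMatch` from this file's imports:

```ts
export function setOutputAndState(
    key: string,
    cacheResult?: ArtifactCacheEntry
): void {
    core.setOutput(
        Outputs.CacheHit,
        isExactKeyMatch(key, cacheResult).toString()
    );
    // Persist the matched entry so the post (save) step can read it back.
    if (cacheResult) {
        core.saveState(State.CacheResult, JSON.stringify(cacheResult));
    }
}
```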
@@ -116,23 +151,27 @@ test("getCacheState with no state returns undefined", () => {
expect(state).toBe(undefined);
expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
expect(getStateMock).toHaveBeenCalledTimes(1);
});
test("getCacheState with valid state", () => {
const cacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
const getStateMock = jest.spyOn(core, "getState");
getStateMock.mockImplementation(() => {
return cacheKey;
return JSON.stringify(cacheEntry);
});
const state = actionUtils.getCacheState();
expect(state).toEqual(cacheKey);
expect(state).toEqual(cacheEntry);
expect(getStateMock).toHaveBeenCalledWith(State.CacheMatchedKey);
expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
expect(getStateMock).toHaveBeenCalledTimes(1);
});
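On the new side of this hunk the saved state is a JSON-serialized entry rather than a bare key, so the getter parses it back. A sketch:

```ts
export function getCacheState(): ArtifactCacheEntry | undefined {
    const stateData = core.getState(State.CacheResult);
    return stateData
        ? (JSON.parse(stateData) as ArtifactCacheEntry)
        : undefined;
}
```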
@@ -146,7 +185,7 @@ test("logWarning logs a message with a warning prefix", () => {
expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`);
});
test("isValidEvent returns false for event that does not have a branch or tag", () => {
test("isValidEvent returns false for unknown event", () => {
const event = "foo";
process.env[Events.Key] = event;
@@ -155,42 +194,164 @@ test("isValidEvent returns false for event that does not have a branch or tag",
expect(isValidEvent).toBe(false);
});
test("isValidEvent returns true for event that has a ref", () => {
test("resolvePaths with no ~ in path", async () => {
const filePath = ".cache";
// Create the following layout:
// cwd
// cwd/.cache
// cwd/.cache/file.txt
const root = path.join(getTempDir(), "no-tilde");
// tarball entries will be relative to workspace
process.env["GITHUB_WORKSPACE"] = root;
await fs.mkdir(root, { recursive: true });
const cache = path.join(root, ".cache");
await fs.mkdir(cache, { recursive: true });
await fs.writeFile(path.join(cache, "file.txt"), "cached");
const originalCwd = process.cwd();
try {
process.chdir(root);
const resolvedPath = await actionUtils.resolvePaths([filePath]);
const expectedPath = [filePath];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
process.chdir(originalCwd);
}
});
test("resolvePaths with ~ in path", async () => {
const cacheDir = uuid();
const filePath = `~/${cacheDir}`;
// Create the following layout:
// ~/uuid
// ~/uuid/file.txt
const homedir = jest.requireActual("os").homedir();
const homedirMock = jest.spyOn(os, "homedir");
homedirMock.mockImplementation(() => {
return homedir;
});
const target = path.join(homedir, cacheDir);
await fs.mkdir(target, { recursive: true });
await fs.writeFile(path.join(target, "file.txt"), "cached");
const root = getTempDir();
process.env["GITHUB_WORKSPACE"] = root;
try {
const resolvedPath = await actionUtils.resolvePaths([filePath]);
const expectedPath = [path.relative(root, target)];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
await io.rmRF(target);
}
});
test("resolvePaths with home not found", async () => {
const filePath = "~/.cache/yarn";
const homedirMock = jest.spyOn(os, "homedir");
homedirMock.mockImplementation(() => {
return "";
});
await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
"Unable to determine HOME directory"
);
});
test("resolvePaths inclusion pattern returns found", async () => {
const pattern = "*.ts";
// Create the following layout:
// inclusion-patterns
// inclusion-patterns/miss.txt
// inclusion-patterns/test.ts
const root = path.join(getTempDir(), "inclusion-patterns");
// tarball entries will be relative to workspace
process.env["GITHUB_WORKSPACE"] = root;
await fs.mkdir(root, { recursive: true });
await fs.writeFile(path.join(root, "miss.txt"), "no match");
await fs.writeFile(path.join(root, "test.ts"), "match");
const originalCwd = process.cwd();
try {
process.chdir(root);
const resolvedPath = await actionUtils.resolvePaths([pattern]);
const expectedPath = ["test.ts"];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
process.chdir(originalCwd);
}
});
test("resolvePaths exclusion pattern returns not found", async () => {
const patterns = ["*.ts", "!test.ts"];
// Create the following layout:
// exclusion-patterns
// exclusion-patterns/miss.txt
// exclusion-patterns/test.ts
const root = path.join(getTempDir(), "exclusion-patterns");
// tarball entries will be relative to workspace
process.env["GITHUB_WORKSPACE"] = root;
await fs.mkdir(root, { recursive: true });
await fs.writeFile(path.join(root, "miss.txt"), "no match");
await fs.writeFile(path.join(root, "test.ts"), "no match");
const originalCwd = process.cwd();
try {
process.chdir(root);
const resolvedPath = await actionUtils.resolvePaths(patterns);
const expectedPath = [];
expect(resolvedPath).toStrictEqual(expectedPath);
} finally {
process.chdir(originalCwd);
}
});
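These four cases (plain relative path, tilde expansion, missing home directory, and glob inclusion/exclusion) are all consistent with delegating to `@actions/glob` and relativizing each match against `GITHUB_WORKSPACE`; the "Unable to determine HOME directory" error is what `@actions/glob` raises when `os.homedir()` is empty. A sketch under those assumptions:

```ts
import * as glob from "@actions/glob";
import * as path from "path";

export async function resolvePaths(patterns: string[]): Promise<string[]> {
    const paths: string[] = [];
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    const globber = await glob.create(patterns.join("\n"), {
        implicitDescendants: false
    });

    for await (const file of globber.globGenerator()) {
        // Tar entries are created relative to the workspace, so the
        // resolved paths are relativized the same way.
        paths.push(path.relative(workspace, file));
    }

    return paths;
}
```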
test("isValidEvent returns true for push event", () => {
const event = Events.Push;
process.env[Events.Key] = event;
process.env[RefKey] = "ref/heads/feature";
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(true);
});
test("getInputAsArray returns empty array if not required and missing", () => {
expect(actionUtils.getInputAsArray("foo")).toEqual([]);
test("isValidEvent returns true for pull request event", () => {
const event = Events.PullRequest;
process.env[Events.Key] = event;
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(true);
});
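On the new side, event validation is a whitelist rather than a ref check. A sketch consistent with the push/pull_request cases here and with the warning text asserted in the restore and save tests below, reusing `Events` from the constants import:

```ts
export function getSupportedEvents(): string[] {
    return [Events.Push, Events.PullRequest];
}

// "foo" fails this check; push and pull_request pass.
export function isValidEvent(): boolean {
    const githubEvent = process.env[Events.Key] || "";
    return getSupportedEvents().includes(githubEvent);
}
```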
test("getInputAsArray throws error if required and missing", () => {
expect(() =>
actionUtils.getInputAsArray("foo", { required: true })
).toThrowError();
});
test("unlinkFile unlinks file", async () => {
const testDirectory = await fs.mkdtemp("unlinkFileTest");
const testFile = path.join(testDirectory, "test.txt");
await fs.writeFile(testFile, "hello world");
test("getInputAsArray handles single line correctly", () => {
testUtils.setInput("foo", "bar");
expect(actionUtils.getInputAsArray("foo")).toEqual(["bar"]);
});
await actionUtils.unlinkFile(testFile);
test("getInputAsArray handles multiple lines correctly", () => {
testUtils.setInput("foo", "bar\nbaz");
expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
});
// This should throw as testFile should not exist
await expect(fs.stat(testFile)).rejects.toThrow();
test("getInputAsArray handles different new lines correctly", () => {
testUtils.setInput("foo", "bar\r\nbaz");
expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
});
test("getInputAsArray handles empty lines correctly", () => {
testUtils.setInput("foo", "\n\nbar\n\nbaz\n\n");
expect(actionUtils.getInputAsArray("foo")).toEqual(["bar", "baz"]);
await fs.rmdir(testDirectory);
});
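The remaining helpers exercised above are small. Sketches consistent with these cases (the `\r\n` case works because each split segment is trimmed):

```ts
import { promises as fs } from "fs";
import * as core from "@actions/core";

// Split a multi-line input, trim each line (dropping the \r that CRLF
// input leaves behind), and discard empty lines.
export function getInputAsArray(
    name: string,
    options?: core.InputOptions
): string[] {
    return core
        .getInput(name, options)
        .split("\n")
        .map(s => s.trim())
        .filter(x => x !== "");
}

export function unlinkFile(filePath: string): Promise<void> {
    return fs.unlink(filePath);
}
```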

View File

@@ -0,0 +1,177 @@
import { getCacheVersion, retry } from "../src/cacheHttpClient";
import { CompressionMethod, Inputs } from "../src/constants";
import * as testUtils from "../src/utils/testUtils";
afterEach(() => {
testUtils.clearInputs();
});
test("getCacheVersion with path input and compression method undefined returns version", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const result = getCacheVersion();
expect(result).toEqual(
"b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
);
});
test("getCacheVersion with zstd compression returns version", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const result = getCacheVersion(CompressionMethod.Zstd);
expect(result).toEqual(
"273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
);
});
test("getCacheVersion with gzip compression does not change vesion", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const result = getCacheVersion(CompressionMethod.Gzip);
expect(result).toEqual(
"b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
);
});
test("getCacheVersion with no input throws", async () => {
expect(() => getCacheVersion()).toThrow();
});
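The three fixtures imply that the version is a SHA-256 digest of the path input, with the compression method mixed in only for zstd so that existing gzip caches keep their version. A sketch, assuming the components are joined with a `|` separator before hashing (the separator choice is an assumption):

```ts
import * as core from "@actions/core";
import * as crypto from "crypto";
import { CompressionMethod, Inputs } from "../src/constants";

export function getCacheVersion(
    compressionMethod?: CompressionMethod
): string {
    const components = [core.getInput(Inputs.Path, { required: true })].concat(
        // Gzip is the default; omitting it keeps old cache versions valid.
        compressionMethod === CompressionMethod.Zstd ? [compressionMethod] : []
    );
    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}
```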
interface TestResponse {
statusCode: number;
result: string | null;
}
function handleResponse(
response: TestResponse | undefined
): Promise<TestResponse> {
if (!response) {
fail("Retry method called too many times");
}
if (response.statusCode === 999) {
throw Error("Test Error");
} else {
return Promise.resolve(response);
}
}
async function testRetryExpectingResult(
responses: Array<TestResponse>,
expectedResult: string | null
): Promise<void> {
responses = responses.reverse(); // Reverse responses since we pop from end
const actualResult = await retry(
"test",
() => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
);
expect(actualResult.result).toEqual(expectedResult);
}
async function testRetryExpectingError(
responses: Array<TestResponse>
): Promise<void> {
responses = responses.reverse(); // Reverse responses since we pop from end
expect(
retry(
"test",
() => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
)
).rejects.toBeInstanceOf(Error);
}
test("retry works on successful response", async () => {
await testRetryExpectingResult(
[
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry works after retryable status code", async () => {
await testRetryExpectingResult(
[
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry fails after exhausting retries", async () => {
await testRetryExpectingError([
{
statusCode: 503,
result: null
},
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: "Ok"
}
]);
});
test("retry fails after non-retryable status code", async () => {
await testRetryExpectingError([
{
statusCode: 500,
result: null
},
{
statusCode: 200,
result: "Ok"
}
]);
});
test("retry works after error", async () => {
await testRetryExpectingResult(
[
{
statusCode: 999,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry returns after client error", async () => {
await testRetryExpectingResult(
[
{
statusCode: 400,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
null
);
});
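Taken together, these cases fix the retry contract: success and client-error codes (including 400) return immediately, 503 and thrown errors are retried, 500 fails without a retry, and attempts are capped at two. A sketch consistent with all six tests:

```ts
export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let errorMessage = "";
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        let isRetryable = false;
        try {
            const response = await method();
            const statusCode = getStatusCode(response);
            if (!statusCode || statusCode < 500) {
                // Success and client errors (e.g. 400) are returned as-is.
                return response;
            }
            // Of the server errors, only 502/503/504 are worth retrying.
            isRetryable = [502, 503, 504].includes(statusCode);
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            // Thrown errors (the statusCode === 999 fixture) are retryable.
            isRetryable = true;
            errorMessage = (error as Error).message;
        }
        if (!isRetryable) {
            break;
        }
    }
    throw Error(`${name} failed: ${errorMessage}`);
}
```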

View File

@@ -1,11 +1,21 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as path from "path";
import { Events, Inputs, RefKey } from "../src/constants";
import * as cacheHttpClient from "../src/cacheHttpClient";
import {
CacheFilename,
CompressionMethod,
Events,
Inputs
} from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/restore";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
@@ -21,23 +31,24 @@ beforeAll(() => {
return actualUtils.isValidEvent();
});
jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
(name, options) => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getInputAsArray(name, options);
}
);
jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getSupportedEvents();
});
jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getCacheFileName(cm);
});
});
beforeEach(() => {
process.env[Events.Key] = Events.Push;
process.env[RefKey] = "refs/heads/feature-branch";
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
delete process.env[RefKey];
});
test("restore with invalid event outputs warning", async () => {
@@ -45,19 +56,16 @@ test("restore with invalid event outputs warning", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
delete process.env[RefKey];
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
`Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with no path should fail", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(0);
// this input isn't necessary for restore b/c tarball contains entries relative to workspace
expect(failedMock).not.toHaveBeenCalledWith(
"Input required and not supplied: path"
@@ -67,89 +75,71 @@ test("restore with no path should fail", async () => {
test("restore with no key", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: key"
);
});
test("restore with too many keys should fail", async () => {
const path = "node_modules";
const key = "node-test";
const restoreKeys = [...Array(20).keys()].map(x => x.toString());
testUtils.setInputs({
path: path,
path: "node_modules",
key,
restoreKeys
});
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, restoreKeys);
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: Keys are limited to a maximum of 10.`
);
});
test("restore with large key should fail", async () => {
const path = "node_modules";
const key = "foo".repeat(512); // Over the 512 character limit
testUtils.setInputs({
path: path,
path: "node_modules",
key
});
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
);
});
test("restore with invalid key should fail", async () => {
const path = "node_modules";
const key = "comma,comma";
testUtils.setInputs({
path: path,
path: "node_modules",
key
});
const failedMock = jest.spyOn(core, "setFailed");
const restoreCacheMock = jest.spyOn(cache, "restoreCache");
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot contain commas.`
);
});
test("restore with no cache found", async () => {
const path = "node_modules";
const key = "node-test";
testUtils.setInputs({
path: path,
path: "node_modules",
key
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
return Promise.resolve(undefined);
});
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
return Promise.resolve(null);
});
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(failedMock).toHaveBeenCalledTimes(0);
@@ -159,28 +149,25 @@ test("restore with no cache found", async () => {
});
test("restore with server error should fail", async () => {
const path = "node_modules";
const key = "node-test";
testUtils.setInputs({
path: path,
path: "node_modules",
key
});
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
throw new Error("HTTP Error Occurred");
});
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
throw new Error("HTTP Error Occurred");
});
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(logWarningMock).toHaveBeenCalledTimes(1);
@@ -193,11 +180,10 @@ test("restore with server error should fail", async () => {
});
test("restore with restore keys and no cache found", async () => {
const path = "node_modules";
const key = "node-test";
const restoreKey = "node-";
testUtils.setInputs({
path: path,
path: "node_modules",
key,
restoreKeys: [restoreKey]
});
@@ -205,17 +191,14 @@ test("restore with restore keys and no cache found", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
return Promise.resolve(undefined);
});
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
return Promise.resolve(null);
});
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(failedMock).toHaveBeenCalledTimes(0);
@@ -224,43 +207,161 @@ test("restore with restore keys and no cache found", async () => {
);
});
test("restore with cache found for key", async () => {
const path = "node_modules";
test("restore with gzip compressed cache found", async () => {
const key = "node-test";
testUtils.setInputs({
path: path,
path: "node_modules",
key
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, CacheFilename.Gzip);
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 142;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
return Promise.resolve(key);
});
const compression = CompressionMethod.Gzip;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key], {
compressionMethod: compression
});
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
expect(unlinkFileMock).toHaveBeenCalledTimes(1);
expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("restore with a pull request event and zstd compressed cache found", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
key
});
process.env[Events.Key] = Events.PullRequest;
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, CacheFilename.Zstd);
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 62915000;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key], {
compressionMethod: compression
});
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("restore with cache found for restore key", async () => {
const path = "node_modules";
const key = "node-test";
const restoreKey = "node-";
testUtils.setInputs({
path: path,
path: "node_modules",
key,
restoreKeys: [restoreKey]
});
@@ -268,19 +369,60 @@ test("restore with cache found for restore key", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: restoreKey,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, CacheFilename.Zstd);
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 142;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
return Promise.resolve(restoreKey);
});
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, [restoreKey]);
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
compressionMethod: compression
});
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
@@ -288,4 +430,5 @@ test("restore with cache found for restore key", async () => {
`Cache restored from key: ${restoreKey}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
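For orientation, the happy path these mocks trace through, condensed into one sketch (names come from the imports and spies above; error handling and the null-entry case are omitted):

```ts
// Condensed sketch of the restore flow exercised by the tests above.
async function restoreSketch(keys: string[]): Promise<void> {
    const compressionMethod = await actionUtils.getCompressionMethod();
    const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
        compressionMethod
    });
    actionUtils.setCacheState(cacheEntry);

    const archivePath = path.join(
        await actionUtils.createTempDirectory(),
        actionUtils.getCacheFileName(compressionMethod)
    );
    await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath);

    const archiveFileSize = actionUtils.getArchiveFileSize(archivePath);
    // 62915000 / (1024 * 1024) rounds to the "~60 MB" asserted above.
    core.info(
        `Cache Size: ~${Math.round(
            archiveFileSize / (1024 * 1024)
        )} MB (${archiveFileSize} B)`
    );

    await tar.extractTar(archivePath, compressionMethod);
    await actionUtils.unlinkFile(archivePath);
    actionUtils.setCacheHitOutput(
        actionUtils.isExactKeyMatch(keys[0], cacheEntry)
    );
    core.info(`Cache restored from key: ${cacheEntry.cacheKey}`);
}
```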

View File

@@ -1,13 +1,22 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as path from "path";
import { Events, Inputs, RefKey } from "../src/constants";
import * as cacheHttpClient from "../src/cacheHttpClient";
import {
CacheFilename,
CompressionMethod,
Events,
Inputs
} from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/save";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("@actions/core");
jest.mock("@actions/cache");
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
@@ -19,14 +28,6 @@ beforeAll(() => {
return jest.requireActual("../src/utils/actionUtils").getCacheState();
});
jest.spyOn(actionUtils, "getInputAsArray").mockImplementation(
(name, options) => {
return jest
.requireActual("../src/utils/actionUtils")
.getInputAsArray(name, options);
}
);
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
return jest
@@ -39,17 +40,35 @@ beforeAll(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isValidEvent();
});
jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getSupportedEvents();
});
jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
async filePaths => {
return filePaths.map(x => path.resolve(x));
}
);
jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
return Promise.resolve("/foo/bar");
});
jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getCacheFileName(cm);
});
});
beforeEach(() => {
process.env[Events.Key] = Events.Push;
process.env[RefKey] = "refs/heads/feature-branch";
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
delete process.env[RefKey];
});
test("save with invalid event outputs warning", async () => {
@@ -57,10 +76,9 @@ test("save with invalid event outputs warning", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
delete process.env[RefKey];
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
`Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
@@ -69,21 +87,25 @@ test("save with no primary key in state outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return "";
});
const saveCacheMock = jest.spyOn(cache, "saveCache");
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledWith(
`Error retrieving key from state.`
);
@@ -96,25 +118,33 @@ test("save with exact match returns early", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = primaryKey;
const cacheEntry: ArtifactCacheEntry = {
cacheKey: primaryKey,
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const saveCacheMock = jest.spyOn(cache, "saveCache");
const createTarMock = jest.spyOn(tar, "createTar");
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledWith(
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
@@ -123,22 +153,25 @@ test("save with missing input outputs warning", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = "Linux-node-";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const saveCacheMock = jest.spyOn(cache, "saveCache");
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledWith(
"Input required and not supplied: path"
);
@@ -151,12 +184,17 @@ test("save with large cache outputs warning", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = "Linux-node-";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
@@ -164,26 +202,36 @@ test("save with large cache outputs warning", async () => {
});
const inputPath = "node_modules";
const cachePaths = [path.resolve(inputPath)];
testUtils.setInput(Inputs.Path, inputPath);
const saveCacheMock = jest
.spyOn(cache, "saveCache")
.mockImplementationOnce(() => {
throw new Error(
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
);
});
const createTarMock = jest.spyOn(tar, "createTar");
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
return cacheSize;
});
const compression = CompressionMethod.Gzip;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
const archiveFolder = "/foo/bar";
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith(
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("save with reserve cache failure outputs warning", async () => {
@@ -192,12 +240,17 @@ test("save with reserve cache failure outputs warning", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = "Linux-node-";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
@@ -207,26 +260,35 @@ test("save with reserve cache failure outputs warning", async () => {
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
const saveCacheMock = jest
.spyOn(cache, "saveCache")
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
const actualCache = jest.requireActual("@actions/cache");
const error = new actualCache.ReserveCacheError(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
throw error;
return Promise.resolve(-1);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
compressionMethod: compression
});
expect(infoMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("save with server error outputs warning", async () => {
@@ -234,12 +296,17 @@ test("save with server error outputs warning", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = "Linux-node-";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
@@ -247,35 +314,70 @@ test("save with server error outputs warning", async () => {
});
const inputPath = "node_modules";
const cachePaths = [path.resolve(inputPath)];
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest
.spyOn(cache, "saveCache")
.spyOn(cacheHttpClient, "saveCache")
.mockImplementationOnce(() => {
throw new Error("HTTP Error Occurred");
});
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
compressionMethod: compression
});
const archiveFolder = "/foo/bar";
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
test("save with valid inputs uploads a cache", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = "Linux-node-";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
@@ -283,19 +385,44 @@ test("save with valid inputs uploads a cache", async () => {
});
const inputPath = "node_modules";
const cachePaths = [path.resolve(inputPath)];
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const saveCacheMock = jest
.spyOn(cache, "saveCache")
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
const compression = CompressionMethod.Zstd;
const getCompressionMock = jest
.spyOn(actionUtils, "getCompressionMethod")
.mockReturnValue(Promise.resolve(compression));
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
compressionMethod: compression
});
const archiveFolder = "/foo/bar";
const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(
archiveFolder,
cachePaths,
compression
);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
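For orientation, the save-side counterpart, condensed into one sketch (names come from the imports and spies above; the early return matches the reserve-failure test):

```ts
// Condensed sketch of the save flow exercised by the tests above.
async function saveSketch(primaryKey: string): Promise<void> {
    const compressionMethod = await actionUtils.getCompressionMethod();
    const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
        compressionMethod
    });
    if (cacheId === -1) {
        core.info(
            `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
        );
        return;
    }

    const cachePaths = await actionUtils.resolvePaths(
        core.getInput(Inputs.Path, { required: true }).split("\n")
    );
    const archiveFolder = await actionUtils.createTempDirectory();
    const archiveFile = path.join(
        archiveFolder,
        actionUtils.getCacheFileName(compressionMethod)
    );
    await tar.createTar(archiveFolder, cachePaths, compressionMethod);
    await cacheHttpClient.saveCache(cacheId, archiveFile);
}
```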

__tests__/tar.test.ts (new file, 204 lines)
View File

@@ -0,0 +1,204 @@
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import { CacheFilename, CompressionMethod } from "../src/constants";
import * as tar from "../src/tar";
import * as utils from "../src/utils/actionUtils";
import fs = require("fs");
jest.mock("@actions/exec");
jest.mock("@actions/io");
const IS_WINDOWS = process.platform === "win32";
function getTempDir(): string {
return path.join(__dirname, "_temp", "tar");
}
beforeAll(async () => {
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
});
process.env["GITHUB_WORKSPACE"] = process.cwd();
await jest.requireActual("@actions/io").rmRF(getTempDir());
});
afterAll(async () => {
delete process.env["GITHUB_WORKSPACE"];
await jest.requireActual("@actions/io").rmRF(getTempDir());
});
test("zstd extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const archivePath = IS_WINDOWS
? `${process.env["windir"]}\\fakepath\\cache.tar`
: "cache.tar";
const workspace = process.env["GITHUB_WORKSPACE"];
await tar.extractTar(archivePath, CompressionMethod.Zstd);
expect(mkdirMock).toHaveBeenCalledWith(workspace);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"--use-compress-program",
"zstd -d --long=30",
"-xf",
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
],
{ cwd: undefined }
);
});
test("gzip extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const archivePath = IS_WINDOWS
? `${process.env["windir"]}\\fakepath\\cache.tar`
: "cache.tar";
const workspace = process.env["GITHUB_WORKSPACE"];
await tar.extractTar(archivePath, CompressionMethod.Gzip);
expect(mkdirMock).toHaveBeenCalledWith(workspace);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"-z",
"-xf",
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
],
{ cwd: undefined }
);
});
test("gzip extract GNU tar on windows", async () => {
if (IS_WINDOWS) {
jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
const isGnuMock = jest
.spyOn(utils, "useGnuTar")
.mockReturnValue(Promise.resolve(true));
const execMock = jest.spyOn(exec, "exec");
const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
const workspace = process.env["GITHUB_WORKSPACE"];
await tar.extractTar(archivePath, CompressionMethod.Gzip);
expect(isGnuMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"tar"`,
[
"-z",
"-xf",
archivePath.replace(/\\/g, "/"),
"-P",
"-C",
workspace?.replace(/\\/g, "/"),
"--force-local"
],
{ cwd: undefined }
);
}
});
test("zstd create tar", async () => {
const execMock = jest.spyOn(exec, "exec");
const archiveFolder = getTempDir();
const workspace = process.env["GITHUB_WORKSPACE"];
const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
await fs.promises.mkdir(archiveFolder, { recursive: true });
await tar.createTar(
archiveFolder,
sourceDirectories,
CompressionMethod.Zstd
);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"--use-compress-program",
"zstd -T0 --long=30",
"-cf",
IS_WINDOWS
? CacheFilename.Zstd.replace(/\\/g, "/")
: CacheFilename.Zstd,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
"--files-from",
"manifest.txt"
],
{
cwd: archiveFolder
}
);
});
test("gzip create tar", async () => {
const execMock = jest.spyOn(exec, "exec");
const archiveFolder = getTempDir();
const workspace = process.env["GITHUB_WORKSPACE"];
const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
await fs.promises.mkdir(archiveFolder, { recursive: true });
await tar.createTar(
archiveFolder,
sourceDirectories,
CompressionMethod.Gzip
);
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(
`"${tarPath}"`,
[
"-z",
"-cf",
IS_WINDOWS
? CacheFilename.Gzip.replace(/\\/g, "/")
: CacheFilename.Gzip,
"-P",
"-C",
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
"--files-from",
"manifest.txt"
],
{
cwd: archiveFolder
}
);
});
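The Windows branches above encode the tar-binary selection: prefer the BSD `tar.exe` bundled under System32, and fall back to GNU tar with `--force-local` when it is absent; the zstd variants then shell out through `--use-compress-program`. A sketch of the selection logic, consistent with the `existsSync`/`useGnuTar` mocks and reusing `fs`, `io`, and `utils` from this file's imports:

```ts
async function getTarPath(args: string[]): Promise<string> {
    const IS_WINDOWS = process.platform === "win32";
    if (IS_WINDOWS) {
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (fs.existsSync(systemTar)) {
            // BSD tar shipped with recent Windows images.
            return systemTar;
        }
        if (await utils.useGnuTar()) {
            // GNU tar needs --force-local so C:\ isn't parsed as a remote host.
            args.push("--force-local");
            return "tar";
        }
    }
    return await io.which("tar", true);
}
```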

View File

@@ -3,7 +3,7 @@ description: 'Cache artifacts like dependencies and build outputs to improve wor
author: 'GitHub'
inputs:
path:
description: 'A list of files, directories, and wildcard patterns to cache and restore'
description: 'A directory to store and save the cache'
required: true
key:
description: 'An explicit key for restoring and saving the cache'

3382
dist/restore/index.js vendored

File diff suppressed because it is too large.

3376
dist/save/index.js vendored

File diff suppressed because it is too large.


@ -2,7 +2,6 @@
- [Examples](#examples)
- [C# - NuGet](#c---nuget)
- [D - DUB](#d---dub)
- [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules)
- [Haskell - Cabal](#haskell---cabal)
@ -35,7 +34,7 @@
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
@ -44,25 +43,13 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
```
Depending on the environment, huge packages might be pre-installed in the global cache folder.
With `actions/cache@v2` you can now exclude unwanted packages with an [exclude pattern](https://github.com/actions/toolkit/tree/master/packages/glob#exclude-patterns)
```yaml
- uses: actions/cache@v2
with:
path: |
~/.nuget/packages
!~/.nuget/packages/unwanted
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
Or you could move the cache folder as shown below.
If you do not want to include them, consider moving the cache folder as shown below.
>Note: This workflow does not work for projects that require files to be placed in the user profile package folder
```yaml
env:
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ${{ github.workspace }}/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
@ -70,33 +57,9 @@ steps:
${{ runner.os }}-nuget-
```
## D - DUB
### POSIX
```yaml
- uses: actions/cache@v2
with:
path: ~/.dub
key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.json') }}
restore-keys: |
${{ runner.os }}-dub-
```
### Windows
```yaml
- uses: actions/cache@v2
with:
path: ~\AppData\Local\dub
key: ${{ runner.os }}-dub-${{ hashFiles('**/dub.json') }}
restore-keys: |
${{ runner.os }}-dub-
```
## Elixir - Mix
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: deps
key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
@ -107,7 +70,7 @@ steps:
## Go - Modules
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
@ -120,20 +83,27 @@ steps:
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
```yaml
- uses: actions/cache@v2
name: Cache ~/.cabal/packages, ~/.cabal/store and dist-newstyle
- uses: actions/cache@v1
name: Cache ~/.cabal/packages
with:
path: |
~/.cabal/packages
~/.cabal/store
dist-newstyle
key: ${{ runner.os }}-${{ matrix.ghc }}
path: ~/.cabal/packages
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
name: Cache ~/.cabal/store
with:
path: ~/.cabal/store
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
name: Cache dist-newstyle
with:
path: dist-newstyle
key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
```
## Java - Gradle
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
@ -144,7 +114,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
## Java - Maven
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
@ -161,7 +131,7 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
### macOS and Ubuntu
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@ -176,7 +146,7 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@ -191,7 +161,7 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
@ -201,9 +171,10 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
## Node - Lerna
>Note: this example uses the new multi-paths feature and is only available at `master`
```yaml
- name: restore lerna
uses: actions/cache@v2
uses: actions/cache@master
with:
path: |
node_modules
@ -219,7 +190,7 @@ The yarn cache directory will depend on your operating system and version of `ya
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
- uses: actions/cache@v1
id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
@ -233,7 +204,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
```yaml
- name: Restore Cache
id: restore-cache
uses: actions/cache@v2
uses: actions/cache@v1
with:
path: _export
key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
@ -263,7 +234,7 @@ Esy allows you to export built dependencies and import pre-built dependencies.
id: composer-cache
run: |
echo "::set-output name=dir::$(composer config cache-files-dir)"
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
@ -282,7 +253,7 @@ Locations:
### Simple example
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@ -295,7 +266,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.cache/pip
@ -303,7 +274,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
restore-keys: |
${{ runner.os }}-pip-
- uses: actions/cache@v2
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Caches/pip
@ -311,7 +282,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
restore-keys: |
${{ runner.os }}-pip-
- uses: actions/cache@v2
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\pip\Cache
@ -330,7 +301,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
echo "::set-output name=dir::$(pip cache dir)"
- name: pip cache
uses: actions/cache@v2
uses: actions/cache@v1
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@ -347,7 +318,7 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
run: |
python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
@ -366,7 +337,7 @@ Locations:
### Simple example
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
@ -379,7 +350,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.local/share/renv
@ -387,7 +358,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v2
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Application Support/renv
@ -395,7 +366,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v2
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\renv
@ -407,7 +378,7 @@ Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
## Ruby - Bundler
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: vendor/bundle
key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
@ -426,31 +397,42 @@ When dependencies are installed later in the workflow, we must specify the same
## Rust - Cargo
```yaml
- uses: actions/cache@v2
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: |
~/.cargo/registry
~/.cargo/git
target
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@v1
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```
## Scala - SBT
```yaml
- name: Cache SBT
uses: actions/cache@v2
- name: Cache SBT ivy cache
uses: actions/cache@v1
with:
path: |
~/.ivy2/cache
~/.sbt
path: ~/.ivy2/cache
key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
uses: actions/cache@v1
with:
path: ~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```
## Swift, Objective-C - Carthage
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: Carthage
key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }}
@ -461,7 +443,7 @@ When dependencies are installed later in the workflow, we must specify the same
## Swift, Objective-C - CocoaPods
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: Pods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
@ -472,7 +454,7 @@ When dependencies are installed later in the workflow, we must specify the same
## Swift - Swift Package Manager
```yaml
- uses: actions/cache@v2
- uses: actions/cache@v1
with:
path: .build
key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}

6149
package-lock.json generated

File diff suppressed because it is too large.


@ -25,13 +25,16 @@
"dependencies": {
"@actions/core": "^1.2.0",
"@actions/exec": "^1.0.1",
"@actions/glob": "^0.1.0",
"@actions/http-client": "^1.0.8",
"@actions/io": "^1.0.1",
"@actions/cache": "^0.2.1"
"uuid": "^3.3.3"
},
"devDependencies": {
"@types/jest": "^24.0.13",
"@types/nock": "^11.1.0",
"@types/node": "^12.0.4",
"@types/uuid": "^3.4.5",
"@typescript-eslint/eslint-plugin": "^2.7.0",
"@typescript-eslint/parser": "^2.7.0",
"@zeit/ncc": "^0.20.5",

420
src/cacheHttpClient.ts Normal file

@ -0,0 +1,420 @@
import * as core from "@actions/core";
import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import {
IHttpClientResponse,
IRequestOptions,
ITypedResponse
} from "@actions/http-client/interfaces";
import * as crypto from "crypto";
import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";
import { CompressionMethod, Inputs, SocketTimeout } from "./constants";
import {
ArtifactCacheEntry,
CacheOptions,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";
const versionSalt = "1.0";
function isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300;
}
function isServerErrorStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return true;
}
return statusCode >= 500;
}
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl(resource: string): string {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = (
process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
""
).replace("pipelines", "artifactcache");
if (!baseUrl) {
throw new Error(
"Cache Service Url not found, unable to restore cache."
);
}
const url = `${baseUrl}_apis/artifactcache/${resource}`;
core.debug(`Resource Url: ${url}`);
return url;
}
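// For example (URLs illustrative): with
// ACTIONS_RUNTIME_URL="https://pipelines.example.com/org/", a resource of
// "caches" resolves to
// "https://artifactcache.example.com/org/_apis/artifactcache/caches".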
function createAcceptHeader(type: string, apiVersion: string): string {
return `${type};api-version=${apiVersion}`;
}
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
headers: {
Accept: createAcceptHeader("application/json", "6.0-preview.1")
}
};
return requestOptions;
}
function createHttpClient(): HttpClient {
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
return new HttpClient(
"actions/cache",
[bearerCredentialHandler],
getRequestOptions()
);
}
export function getCacheVersion(compressionMethod?: CompressionMethod): string {
const components = [core.getInput(Inputs.Path, { required: true })].concat(
compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : []
);
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto
.createHash("sha256")
.update(components.join("|"))
.digest("hex");
}
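// Illustrative only: for a hypothetical `path` input of "~/.npm" with zstd
// selected, the version computed above is equivalent to:
//
//   crypto
//       .createHash("sha256")
//       .update(["~/.npm", "zstd", "1.0"].join("|"))
//       .digest("hex");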
export async function retry<T>(
name: string,
method: () => Promise<T>,
getStatusCode: (response: T) => number | undefined,
maxAttempts = 2
): Promise<T> {
let response: T | undefined = undefined;
let statusCode: number | undefined = undefined;
let isRetryable = false;
let errorMessage = "";
let attempt = 1;
while (attempt <= maxAttempts) {
try {
response = await method();
statusCode = getStatusCode(response);
if (!isServerErrorStatusCode(statusCode)) {
return response;
}
isRetryable = isRetryableStatusCode(statusCode);
errorMessage = `Cache service responded with ${statusCode}`;
} catch (error) {
isRetryable = true;
errorMessage = error.message;
}
core.debug(
`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
);
if (!isRetryable) {
core.debug(`${name} - Error is not retryable`);
break;
}
attempt++;
}
throw Error(`${name} failed: ${errorMessage}`);
}
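// Hypothetical usage of the generic helper above; `fetchEntry` and its
// response shape are illustrative, not part of this module. Real callers
// use the typed wrappers below.
//
//   const result = await retry(
//       "fetchEntry",
//       () => fetchEntry(), // any () => Promise<T>
//       res => res.statusCode // how to extract the status code from T
//   );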
export async function retryTypedResponse<T>(
name: string,
method: () => Promise<ITypedResponse<T>>,
maxAttempts = 2
): Promise<ITypedResponse<T>> {
return await retry(
name,
method,
(response: ITypedResponse<T>) => response.statusCode,
maxAttempts
);
}
export async function retryHttpClientResponse<T>(
name: string,
method: () => Promise<IHttpClientResponse>,
maxAttempts = 2
): Promise<IHttpClientResponse> {
return await retry(
name,
method,
(response: IHttpClientResponse) => response.message.statusCode,
maxAttempts
);
}
export async function getCacheEntry(
keys: string[],
options?: CacheOptions
): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient();
const version = getCacheVersion(options?.compressionMethod);
const resource = `cache?keys=${encodeURIComponent(
keys.join(",")
)}&version=${version}`;
const response = await retryTypedResponse("getCacheEntry", () =>
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
);
if (response.statusCode === 204) {
return null;
}
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult?.archiveLocation;
if (!cacheDownloadUrl) {
throw new Error("Cache not found.");
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
return cacheResult;
}
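// The resulting request (key names illustrative) has the shape:
//   GET <base>_apis/artifactcache/cache?keys=npm-linux-abc%2Cnpm-linux-&version=<sha256 hex>
// where the comma separating keys is percent-encoded by encodeURIComponent.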
async function pipeResponseToStream(
response: IHttpClientResponse,
output: NodeJS.WritableStream
): Promise<void> {
const pipeline = util.promisify(stream.pipeline);
await pipeline(response.message, output);
}
export async function downloadCache(
archiveLocation: string,
archivePath: string
): Promise<void> {
const writeStream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache");
const downloadResponse = await retryHttpClientResponse(
"downloadCache",
() => httpClient.get(archiveLocation)
);
// Abort download if no traffic received over the socket.
downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
downloadResponse.message.destroy();
core.debug(
`Aborting download, socket timed out after ${SocketTimeout} ms`
);
});
await pipeResponseToStream(downloadResponse, writeStream);
// Validate download size.
const contentLengthHeader =
downloadResponse.message.headers["content-length"];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSize(archivePath);
if (actualLength != expectedLength) {
throw new Error(
`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
);
}
} else {
core.debug("Unable to validate download, no Content-Length header");
}
}
// Reserve Cache
export async function reserveCache(
key: string,
options?: CacheOptions
): Promise<number> {
const httpClient = createHttpClient();
const version = getCacheVersion(options?.compressionMethod);
const reserveCacheRequest: ReserveCacheRequest = {
key,
version
};
const response = await retryTypedResponse("reserveCache", () =>
httpClient.postJson<ReserveCacheResponse>(
getCacheApiUrl("caches"),
reserveCacheRequest
)
);
return response?.result?.cacheId ?? -1;
}
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
async function uploadChunk(
httpClient: HttpClient,
resourceUrl: string,
openStream: () => NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
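// Note: `openStream` is a factory rather than a stream instance so that each
// retry attempt below can re-read the chunk from the start with a fresh
// stream; a partially consumed stream cannot be replayed.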
core.debug(
`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
);
const additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
};
const uploadChunkRequest = async (): Promise<IHttpClientResponse> => {
return await httpClient.sendStream(
"PATCH",
resourceUrl,
openStream(),
additionalHeaders
);
};
await retryHttpClientResponse(
`uploadChunk (start: ${start}, end: ${end})`,
uploadChunkRequest
);
}
function parseEnvNumber(key: string): number | undefined {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
async function uploadFile(
httpClient: HttpClient,
cacheId: number,
archivePath: string
): Promise<void> {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, "r");
const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE =
parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
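// Each of the `concurrency` workers below claims the next chunk by reading
// and then advancing the shared `offset`. Node's single-threaded event loop
// keeps this safe: a worker only yields control at the `await` on
// uploadChunk, never between the read and the update of `offset`.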
try {
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize = Math.min(
fileSize - offset,
MAX_CHUNK_SIZE
);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
await uploadChunk(
httpClient,
resourceUrl,
() =>
fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
}),
start,
end
);
}
})
);
} finally {
fs.closeSync(fd);
}
return;
}
async function commitCache(
httpClient: HttpClient,
cacheId: number,
filesize: number
): Promise<ITypedResponse<null>> {
const commitCacheRequest: CommitCacheRequest = { size: filesize };
return await retryTypedResponse("commitCache", () =>
httpClient.postJson<null>(
getCacheApiUrl(`caches/${cacheId.toString()}`),
commitCacheRequest
)
);
}
export async function saveCache(
cacheId: number,
archivePath: string
): Promise<void> {
const httpClient = createHttpClient();
core.debug("Upload cache");
await uploadFile(httpClient, cacheId, archivePath);
// Commit Cache
core.debug("Commiting cache");
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = await commitCache(
httpClient,
cacheId,
cacheSize
);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
);
}
core.info("Cache saved successfully");
}


@ -9,8 +9,8 @@ export enum Outputs {
}
export enum State {
CachePrimaryKey = "CACHE_KEY",
CacheMatchedKey = "CACHE_RESULT"
CacheKey = "CACHE_KEY",
CacheResult = "CACHE_RESULT"
}
export enum Events {
@ -19,4 +19,17 @@ export enum Events {
PullRequest = "pull_request"
}
export const RefKey = "GITHUB_REF";
export enum CacheFilename {
Gzip = "cache.tgz",
Zstd = "cache.tzst"
}
export enum CompressionMethod {
Gzip = "gzip",
Zstd = "zstd"
}
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const SocketTimeout = 5000;

25
src/contracts.d.ts vendored Normal file
View File

@ -0,0 +1,25 @@
import { CompressionMethod } from "./constants";
export interface ArtifactCacheEntry {
cacheKey?: string;
scope?: string;
creationTime?: string;
archiveLocation?: string;
}
export interface CommitCacheRequest {
size: number;
}
export interface ReserveCacheRequest {
key: string;
version?: string;
}
export interface ReserveCacheResponse {
cacheId: number;
}
export interface CacheOptions {
compressionMethod?: CompressionMethod;
}


@ -1,7 +1,9 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
@ -11,49 +13,103 @@ async function run(): Promise<void> {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.`
} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`
);
return;
}
const primaryKey = core.getInput(Inputs.Key, { required: true });
core.saveState(State.CachePrimaryKey, primaryKey);
core.saveState(State.CacheKey, primaryKey);
const restoreKeys = utils.getInputAsArray(Inputs.RestoreKeys);
const cachePaths = utils.getInputAsArray(Inputs.Path, {
required: true
});
const restoreKeys = core
.getInput(Inputs.RestoreKeys)
.split("\n")
.filter(x => x !== "");
const keys = [primaryKey, ...restoreKeys];
try {
const cacheKey = await cache.restoreCache(
cachePaths,
primaryKey,
restoreKeys
core.debug("Resolved Keys:");
core.debug(JSON.stringify(keys));
if (keys.length > 10) {
core.setFailed(
`Key Validation Error: Keys are limited to a maximum of 10.`
);
if (!cacheKey) {
core.info(
`Cache not found for input keys: ${[
primaryKey,
...restoreKeys
].join(", ")}`
return;
}
for (const key of keys) {
if (key.length > 512) {
core.setFailed(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
);
return;
}
const regex = /^[^,]*$/;
if (!regex.test(key)) {
core.setFailed(
`Key Validation Error: ${key} cannot contain commas.`
);
return;
}
}
// Store the matched cache key
utils.setCacheState(cacheKey);
const compressionMethod = await utils.getCompressionMethod();
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
try {
const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
compressionMethod: compressionMethod
});
if (!cacheEntry?.archiveLocation) {
core.info(`Cache not found for input keys: ${keys.join(", ")}`);
return;
}
const archivePath = path.join(
await utils.createTempDirectory(),
utils.getCacheFileName(compressionMethod)
);
core.debug(`Archive Path: ${archivePath}`);
// Store the cache result
utils.setCacheState(cacheEntry);
try {
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(
cacheEntry.archiveLocation,
archivePath
);
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
);
await extractTar(archivePath, compressionMethod);
} finally {
// Try to delete the archive to save space
try {
await utils.unlinkFile(archivePath);
} catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
const isExactKeyMatch = utils.isExactKeyMatch(
primaryKey,
cacheEntry
);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheKey}`);
core.info(
`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`
);
} catch (error) {
if (error.name === cache.ValidationError.name) {
throw error;
} else {
utils.logWarning(error.message);
utils.setCacheHitOutput(false);
}
utils.logWarning(error.message);
utils.setCacheHitOutput(false);
}
} catch (error) {
core.setFailed(error.message);


@ -1,7 +1,9 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
@ -10,7 +12,9 @@ async function run(): Promise<void> {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported because it's not tied to a branch or tag ref.`
} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`
);
return;
}
@ -18,7 +22,7 @@ async function run(): Promise<void> {
const state = utils.getCacheState();
// Inputs are re-evaluated before the post action, so we want the original key used for restore
const primaryKey = core.getState(State.CachePrimaryKey);
const primaryKey = core.getState(State.CacheKey);
if (!primaryKey) {
utils.logWarning(`Error retrieving key from state.`);
return;
@ -31,21 +35,53 @@ async function run(): Promise<void> {
return;
}
const cachePaths = utils.getInputAsArray(Inputs.Path, {
required: true
});
const compressionMethod = await utils.getCompressionMethod();
try {
await cache.saveCache(cachePaths, primaryKey);
} catch (error) {
if (error.name === cache.ValidationError.name) {
throw error;
} else if (error.name === cache.ReserveCacheError.name) {
core.info(error.message);
} else {
utils.logWarning(error.message);
}
core.debug("Reserving Cache");
const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
compressionMethod: compressionMethod
});
if (cacheId == -1) {
core.info(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
return;
}
core.debug(`Cache ID: ${cacheId}`);
const cachePaths = await utils.resolvePaths(
core
.getInput(Inputs.Path, { required: true })
.split("\n")
.filter(x => x !== "")
);
core.debug("Cache Paths:");
core.debug(`${JSON.stringify(cachePaths)}`);
const archiveFolder = await utils.createTempDirectory();
const archivePath = path.join(
archiveFolder,
utils.getCacheFileName(compressionMethod)
);
core.debug(`Archive Path: ${archivePath}`);
await createTar(archiveFolder, cachePaths, compressionMethod);
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
utils.logWarning(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
);
return;
}
core.debug(`Saving Cache (ID: ${cacheId})`);
await cacheHttpClient.saveCache(cacheId, archivePath);
} catch (error) {
utils.logWarning(error.message);
}

87
src/tar.ts Normal file

@ -0,0 +1,87 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync, writeFileSync } from "fs";
import * as path from "path";
import { CompressionMethod } from "./constants";
import * as utils from "./utils/actionUtils";
async function getTarPath(args: string[]): Promise<string> {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (existsSync(systemTar)) {
return systemTar;
} else if (await utils.useGnuTar()) {
args.push("--force-local");
}
}
return await io.which("tar", true);
}
async function execTar(args: string[], cwd?: string): Promise<void> {
try {
await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
} catch (error) {
throw new Error(`Tar failed with error: ${error?.message}`);
}
}
function getWorkingDirectory(): string {
return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}
export async function extractTar(
archivePath: string,
compressionMethod: CompressionMethod
): Promise<void> {
// Create directory to extract tar into
const workingDirectory = getWorkingDirectory();
await io.mkdirP(workingDirectory);
// -d: Decompress.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const args = [
...(compressionMethod == CompressionMethod.Zstd
? ["--use-compress-program", "zstd -d --long=30"]
: ["-z"]),
"-xf",
archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-P",
"-C",
workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
];
await execTar(args);
}
export async function createTar(
archiveFolder: string,
sourceDirectories: string[],
compressionMethod: CompressionMethod
): Promise<void> {
// Write source directories to manifest.txt to avoid command length limits
const manifestFilename = "manifest.txt";
const cacheFileName = utils.getCacheFileName(compressionMethod);
writeFileSync(
path.join(archiveFolder, manifestFilename),
sourceDirectories.join("\n")
);
// -T#: Compress using # working threads. If # is 0, attempt to detect and use the number of physical CPU cores.
// --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
// Using 30 here because we also support 32-bit self-hosted runners.
const workingDirectory = getWorkingDirectory();
const args = [
...(compressionMethod == CompressionMethod.Zstd
? ["--use-compress-program", "zstd -T0 --long=30"]
: ["-z"]),
"-cf",
cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-P",
"-C",
workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
"--files-from",
manifestFilename
];
await execTar(args, archiveFolder);
}
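Taken together, on a Linux runner the arguments assembled above amount to a command of roughly this shape (paths illustrative): `tar --use-compress-program "zstd -T0 --long=30" -cf cache.tzst -P -C /home/runner/work/repo/repo --files-from manifest.txt`, run with `archiveFolder` as the working directory so that both `cache.tzst` and `manifest.txt` resolve inside the temp folder.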


@ -1,35 +1,87 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as util from "util";
import * as uuidV4 from "uuid/v4";
import { Outputs, RefKey, State } from "../constants";
import {
CacheFilename,
CompressionMethod,
Events,
Outputs,
State
} from "../constants";
import { ArtifactCacheEntry } from "../contracts";
export function isExactKeyMatch(key: string, cacheKey?: string): boolean {
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
export async function createTempDirectory(): Promise<string> {
const IS_WINDOWS = process.platform === "win32";
let tempDirectory: string = process.env["RUNNER_TEMP"] || "";
if (!tempDirectory) {
let baseLocation: string;
if (IS_WINDOWS) {
// On Windows use the USERPROFILE env variable
baseLocation = process.env["USERPROFILE"] || "C:\\";
} else {
if (process.platform === "darwin") {
baseLocation = "/Users";
} else {
baseLocation = "/home";
}
}
tempDirectory = path.join(baseLocation, "actions", "temp");
}
const dest = path.join(tempDirectory, uuidV4.default());
await io.mkdirP(dest);
return dest;
}
export function getArchiveFileSize(path: string): number {
return fs.statSync(path).size;
}
export function isExactKeyMatch(
key: string,
cacheResult?: ArtifactCacheEntry
): boolean {
return !!(
cacheKey &&
cacheKey.localeCompare(key, undefined, {
cacheResult &&
cacheResult.cacheKey &&
cacheResult.cacheKey.localeCompare(key, undefined, {
sensitivity: "accent"
}) === 0
);
}
export function setCacheState(state: string): void {
core.saveState(State.CacheMatchedKey, state);
export function setCacheState(state: ArtifactCacheEntry): void {
core.saveState(State.CacheResult, JSON.stringify(state));
}
export function setCacheHitOutput(isCacheHit: boolean): void {
core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}
export function setOutputAndState(key: string, cacheKey?: string): void {
setCacheHitOutput(isExactKeyMatch(key, cacheKey));
// Store the matched cache key if it exists
cacheKey && setCacheState(cacheKey);
export function setOutputAndState(
key: string,
cacheResult?: ArtifactCacheEntry
): void {
setCacheHitOutput(isExactKeyMatch(key, cacheResult));
// Store the cache result if it exists
cacheResult && setCacheState(cacheResult);
}
export function getCacheState(): string | undefined {
const cacheKey = core.getState(State.CacheMatchedKey);
if (cacheKey) {
core.debug(`Cache state/key: ${cacheKey}`);
return cacheKey;
export function getCacheState(): ArtifactCacheEntry | undefined {
const stateData = core.getState(State.CacheResult);
core.debug(`State: ${stateData}`);
if (stateData) {
return JSON.parse(stateData) as ArtifactCacheEntry;
}
return undefined;
@ -40,19 +92,81 @@ export function logWarning(message: string): void {
core.info(`${warningPrefix}${message}`);
}
// Cache token authorized for all events that are tied to a ref
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {
return RefKey in process.env && Boolean(process.env[RefKey]);
export async function resolvePaths(patterns: string[]): Promise<string[]> {
const paths: string[] = [];
const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
const globber = await glob.create(patterns.join("\n"), {
implicitDescendants: false
});
for await (const file of globber.globGenerator()) {
const relativeFile = path.relative(workspace, file);
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
}
return paths;
}
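// Illustrative example: with GITHUB_WORKSPACE=/home/runner/work/repo/repo and
// the patterns ["dist", "**/node_modules"], a match such as
// /home/runner/work/repo/repo/dist is recorded as the relative entry "dist",
// which is the form createTar's manifest.txt expects.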
export function getInputAsArray(
name: string,
options?: core.InputOptions
): string[] {
return core
.getInput(name, options)
.split("\n")
.map(s => s.trim())
.filter(x => x !== "");
export function getSupportedEvents(): string[] {
return [Events.Push, Events.PullRequest];
}
// Currently the cache token is only authorized for push and pull_request events
// All other events will fail when reading and saving the cache
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {
const githubEvent = process.env[Events.Key] || "";
return getSupportedEvents().includes(githubEvent);
}
export function unlinkFile(path: fs.PathLike): Promise<void> {
return util.promisify(fs.unlink)(path);
}
async function getVersion(app: string): Promise<string> {
core.debug(`Checking ${app} --version`);
let versionOutput = "";
try {
await exec.exec(`${app} --version`, [], {
ignoreReturnCode: true,
silent: true,
listeners: {
stdout: (data: Buffer): string =>
(versionOutput += data.toString()),
stderr: (data: Buffer): string =>
(versionOutput += data.toString())
}
});
} catch (err) {
core.debug(err.message);
}
versionOutput = versionOutput.trim();
core.debug(versionOutput);
return versionOutput;
}
export async function getCompressionMethod(): Promise<CompressionMethod> {
// Disabling zstd on Windows due to https://github.com/actions/cache/issues/301
if (os.platform() === "win32") {
return CompressionMethod.Gzip;
}
const versionOutput = await getVersion("zstd");
return versionOutput.toLowerCase().includes("zstd command line interface")
? CompressionMethod.Zstd
: CompressionMethod.Gzip;
}
export function getCacheFileName(compressionMethod: CompressionMethod): string {
return compressionMethod == CompressionMethod.Zstd
? CacheFilename.Zstd
: CacheFilename.Gzip;
}
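// Hypothetical wiring, mirroring how restore.ts and save.ts use the two
// helpers above: detect the best available compression, then derive the
// archive file name from it.
//
//   const method = await getCompressionMethod();
//   const filename = getCacheFileName(method); // "cache.tzst" or "cache.tgz"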
export async function useGnuTar(): Promise<boolean> {
const versionOutput = await getVersion("tar");
return versionOutput.toLowerCase().includes("gnu tar");
}