Compare commits


2 Commits

SHA1 Message Date
0da99ad140 Preview v0.0.2 Release (#50)
* Create CODE_OF_CONDUCT.md

* Update workflow (#1)

* Run workflow on linux, mac, and windows

* Add status badge

* Use npm install instead

* Bump typescript version

* Use node 12.x

* Add Ruby Gem example (#4)

* Add Cocoapods example (#5)

* Add Carthage example (#10)

* Move examples to their own page (#13)

* Minor typo in README (#15)

from `steps.[ID].outupts.cache-hit` to `steps.[ID].outputs.cache-hit`

* Update README.md

* Prevent commands from executing during tests (#21)

* Prevent commands from executing during tests

* Add newline at end of file

* Drop all commands from output

* Link to NuGet lock files documentation (#20)

* Add trailing dash to Maven fallback key (#19)

* Fix README.md (#25)

`restore-keys` had incorrect indentation.

* Exclude documentation from CI tests (#28)

* Ignore all .md files

* Add note about time-based eviction to README (#30)

* Fix typo in error message (#29)

* Time based eviction interval is 1 week (#34)

* Remove cache checksum debug - close #24 (#26)

* Remove cache checksum debug - close #24

* Remove cache checksum debug on save

* Fix formatting

* Add Go modules example (#18)

* Add Go modules example

* Fix TOC

* Fix repo name in contact email (#41)

* Add Elixir Mix example (#42)

* Add Elixir Mix example

* Fix typo

* Add cargo example for Rust project (#8)

* Add cargo example

* Add hash of Cargo.lock to keys of caches

* Move Rust example to examples.md

* Stop warning when cache is not found (#40)

The cache not being found is a common situation, so a very visible warning is a little too much.

* Bump package version

* Release v0.0.2
2019-11-04 13:39:29 -05:00
e7ad80454a Release preview version 2019-10-30 14:50:22 -04:00
23 changed files with 519 additions and 3516 deletions

.eslintrc.json

@@ -1,16 +0,0 @@
{
  "env": { "node": true, "jest": true },
  "parser": "@typescript-eslint/parser",
  "parserOptions": { "ecmaVersion": 2020, "sourceType": "module" },
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/eslint-recommended",
    "plugin:@typescript-eslint/recommended",
    "plugin:import/errors",
    "plugin:import/warnings",
    "plugin:import/typescript",
    "plugin:prettier/recommended",
    "prettier/@typescript-eslint"
  ],
  "plugins": ["@typescript-eslint", "jest"]
}

.github/workflows/workflow.yml

@@ -1,11 +1,6 @@
name: Tests
on:
  pull_request:
    branches:
      - master
    paths-ignore:
      - '**.md'
  push:
    branches:
      - master
@@ -15,11 +10,9 @@ on:
jobs:
  test:
    name: Test on ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
    runs-on: ${{ matrix.os }}
    steps:
@@ -29,25 +22,10 @@ jobs:
        with:
          node-version: '12.x'
      - name: Get npm cache directory
        id: npm-cache
        run: |
          echo "::set-output name=dir::$(npm config get cache)"
      - uses: actions/cache@v1
        with:
          path: ${{ steps.npm-cache.outputs.dir }}
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-
      - run: npm ci
      - name: Prettier Format Check
        run: npm run format-check
      - name: ESLint Check
        run: npm run lint
      - name: Build & Test
        run: npm run test
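
The workflow steps above capture npm's cache location with a shell `set-output` command and hand it to `actions/cache`. As a rough sketch (not part of this diff, assuming only the `@actions/core` and `@actions/exec` toolkit packages), the same output could be produced in TypeScript:

```typescript
// Sketch: the "Get npm cache directory" step above, done with the Actions
// toolkit instead of a shell one-liner. Illustrative only.
import * as core from "@actions/core";
import * as exec from "@actions/exec";

async function exportNpmCacheDir(): Promise<void> {
    let npmCacheDir = "";
    await exec.exec("npm", ["config", "get", "cache"], {
        listeners: {
            // Collect stdout; `npm config get cache` prints the cache path.
            stdout: (data: Buffer) => {
                npmCacheDir += data.toString();
            }
        }
    });
    // Equivalent to: echo "::set-output name=dir::$(npm config get cache)"
    core.setOutput("dir", npmCacheDir.trim());
}
```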

README.md

@@ -2,11 +2,7 @@
This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>
## Documentation
See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows).
<a href="https://github.com/actions/cache"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg"></a>
## Usage
@@ -28,7 +24,7 @@ Create a workflow `.yml` file in your repository's `.github/workflows` directory
### Example workflow
```yaml
name: Caching Primes
name: Example Caching with npm
on: push
@@ -39,45 +35,31 @@ jobs:
steps:
  - uses: actions/checkout@v1
  - name: Cache Primes
    id: cache-primes
    uses: actions/cache@v1
  - name: Cache node modules
    uses: actions/cache@preview
    with:
      path: prime-numbers
      key: ${{ runner.os }}-primes
      path: node_modules
      key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
      restore-keys: |
        ${{ runner.os }}-node-
  - name: Generate Prime Numbers
    if: steps.cache-primes.outputs.cache-hit != 'true'
    run: /generate-primes.sh -d prime-numbers
  - name: Install Dependencies
    run: npm install
  - name: Use Prime Numbers
    run: /primes.sh -d prime-numbers
  - name: Build
    run: npm run build
  - name: Test
    run: npm run test
```
## Implementation Examples
Every programming language and framework has its own way of caching.
See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
- [C# - Nuget](./examples.md#c---nuget)
- [Elixir - Mix](./examples.md#elixir---mix)
- [Go - Modules](./examples.md#go---modules)
- [Java - Gradle](./examples.md#java---gradle)
- [Java - Maven](./examples.md#java---maven)
- [Node - npm](./examples.md#node---npm)
- [Node - Yarn](./examples.md#node---yarn)
- [PHP - Composer](./examples.md#php---composer)
- [Python - pip](./examples.md#python---pip)
- [Ruby - Gem](./examples.md#ruby---gem)
- [Rust - Cargo](./examples.md#rust---cargo)
- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
## Ecosystem Examples
See [Examples](examples.md)
## Cache Limits
Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
Individual caches are limited to 200MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
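
The save step also refuses to upload archives that blow past the limit; the warning message asserted in the save tests later in this diff implies a guard along these lines (a minimal sketch assuming a 2 GB byte limit, not the verbatim source):

```typescript
// Minimal sketch of the size guard implied by the save tests in this diff.
const FILE_SIZE_LIMIT = 2 * 1024 * 1024 * 1024; // assumed 2 GB limit, in bytes

function checkArchiveSize(archiveFileSize: number): boolean {
    if (archiveFileSize > FILE_SIZE_LIMIT) {
        // Matches the warning the tests expect for a ~4 GB archive.
        console.log(
            `[warning]Cache size of ~${Math.round(
                archiveFileSize / (1024 * 1024)
            )} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
        );
        return false;
    }
    return true;
}
```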
## Skipping steps based on cache-hit
@@ -88,7 +70,7 @@ Example:
steps:
  - uses: actions/checkout@v1
  - uses: actions/cache@v1
  - uses: actions/cache@preview
    id: cache
    with:
      path: path/to/dependencies
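
The `cache-hit` output that the skipping example above relies on is produced by the restore step. A minimal sketch of that logic, using the names and behavior asserted by the test files below (the body is an illustration, not the verbatim source):

```typescript
import * as core from "@actions/core";

// Entry shape mirrors the ArtifactCacheEntry used throughout the tests below.
interface ArtifactCacheEntry {
    cacheKey?: string;
    scope?: string;
    creationTime?: string;
    archiveLocation?: string;
}

// Exact match is case-insensitive but accent-sensitive, per the tests below.
function isExactKeyMatch(
    key: string,
    cacheResult?: ArtifactCacheEntry
): boolean {
    return !!(
        cacheResult &&
        cacheResult.cacheKey &&
        cacheResult.cacheKey.localeCompare(key, undefined, {
            sensitivity: "accent"
        }) === 0
    );
}

function setOutputAndState(
    key: string,
    cacheResult?: ArtifactCacheEntry
): void {
    // "cache-hit" is "true" only on an exact primary-key match.
    core.setOutput("cache-hit", isExactKeyMatch(key, cacheResult).toString());
    // Stash the entry in state so the save step can decide whether to upload.
    if (cacheResult) {
        core.saveState("CACHE_RESULT", JSON.stringify(cacheResult));
    }
}
```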

__tests__/__fixtures__/helloWorld.txt

@@ -1 +0,0 @@
hello world

__tests__/actionUtils.test.ts

@@ -1,236 +0,0 @@
import * as core from "@actions/core";
import * as os from "os";
import * as path from "path";
import { Events, Outputs, State } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import * as actionUtils from "../src/utils/actionUtils";
jest.mock("@actions/core");
jest.mock("os");
afterEach(() => {
delete process.env[Events.Key];
});
test("getArchiveFileSize returns file size", () => {
const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
const size = actionUtils.getArchiveFileSize(filePath);
expect(size).toBe(11);
});
test("isExactKeyMatch with undefined cache entry returns false", () => {
const key = "linux-rust";
const cacheEntry = undefined;
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with empty cache entry returns false", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with different keys returns false", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with different key accents returns false", () => {
const key = "linux-áccent";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-accent"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false);
});
test("isExactKeyMatch with same key returns true", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
});
test("isExactKeyMatch with same key and different casing returns true", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "LINUX-RUST"
};
expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true);
});
test("setOutputAndState with undefined entry to set cache-hit output", () => {
const key = "linux-rust";
const cacheEntry = undefined;
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledTimes(0);
});
test("setOutputAndState with exact match to set cache-hit output and state", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust"
};
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledWith(
State.CacheResult,
JSON.stringify(cacheEntry)
);
expect(saveStateMock).toHaveBeenCalledTimes(1);
});
test("setOutputAndState with no exact match to set cache-hit output and state", () => {
const key = "linux-rust";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43"
};
const setOutputMock = jest.spyOn(core, "setOutput");
const saveStateMock = jest.spyOn(core, "saveState");
actionUtils.setOutputAndState(key, cacheEntry);
expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false");
expect(setOutputMock).toHaveBeenCalledTimes(1);
expect(saveStateMock).toHaveBeenCalledWith(
State.CacheResult,
JSON.stringify(cacheEntry)
);
expect(saveStateMock).toHaveBeenCalledTimes(1);
});
test("getCacheState with no state returns undefined", () => {
const getStateMock = jest.spyOn(core, "getState");
getStateMock.mockImplementation(() => {
return "";
});
const state = actionUtils.getCacheState();
expect(state).toBe(undefined);
expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
expect(getStateMock).toHaveBeenCalledTimes(1);
});
test("getCacheState with valid state", () => {
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
const getStateMock = jest.spyOn(core, "getState");
getStateMock.mockImplementation(() => {
return JSON.stringify(cacheEntry);
});
const state = actionUtils.getCacheState();
expect(state).toEqual(cacheEntry);
expect(getStateMock).toHaveBeenCalledWith(State.CacheResult);
expect(getStateMock).toHaveBeenCalledTimes(1);
});
test("logWarning logs a message with a warning prefix", () => {
const message = "A warning occurred.";
const infoMock = jest.spyOn(core, "info");
actionUtils.logWarning(message);
expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`);
});
test("isValidEvent returns false for unknown event", () => {
const event = "foo";
process.env[Events.Key] = event;
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(false);
});
test("resolvePath with no ~ in path", () => {
const filePath = ".cache/yarn";
const resolvedPath = actionUtils.resolvePath(filePath);
const expectedPath = path.resolve(filePath);
expect(resolvedPath).toBe(expectedPath);
});
test("resolvePath with ~ in path", () => {
const filePath = "~/.cache/yarn";
const homedir = jest.requireActual("os").homedir();
const homedirMock = jest.spyOn(os, "homedir");
homedirMock.mockImplementation(() => {
return homedir;
});
const resolvedPath = actionUtils.resolvePath(filePath);
const expectedPath = path.join(homedir, ".cache/yarn");
expect(resolvedPath).toBe(expectedPath);
});
test("resolvePath with home not found", () => {
const filePath = "~/.cache/yarn";
const homedirMock = jest.spyOn(os, "homedir");
homedirMock.mockImplementation(() => {
return "";
});
expect(() => actionUtils.resolvePath(filePath)).toThrow(
"Unable to resolve `~` to HOME"
);
});
test("isValidEvent returns true for push event", () => {
const event = Events.Push;
process.env[Events.Key] = event;
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(true);
});
test("isValidEvent returns true for pull request event", () => {
const event = Events.PullRequest;
process.env[Events.Key] = event;
const isValidEvent = actionUtils.isValidEvent();
expect(isValidEvent).toBe(true);
});
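
The three `resolvePath` tests above (plain relative paths, `~` expansion against `os.homedir()`, and a missing home directory) pin the behavior down to roughly the following; a sketch consistent with the tests rather than the verbatim source:

```typescript
import * as os from "os";
import * as path from "path";

// Sketch of resolvePath consistent with the three tests above.
function resolvePath(filePath: string): string {
    if (filePath[0] === "~") {
        const home = os.homedir();
        if (!home) {
            // The test with a mocked empty homedir expects exactly this error.
            throw new Error("Unable to resolve `~` to HOME");
        }
        return path.join(home, filePath.slice(1));
    }
    return path.resolve(filePath);
}
```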

__tests__/main.test.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
import * as core from "@actions/core";
import { Inputs } from "../src/constants";
import run from "../src/restore";
import * as testUtils from "../src/utils/testUtils";
test("restore with no path", async () => {
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: path"
);
});
test("restore with no key", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: key"
);
});
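
Both failures above come from `@actions/core`: `getInput(name, { required: true })` throws `Input required and not supplied: <name>` when the input is missing, and `run()` surfaces that through `setFailed`. A sketch of that control flow (illustrative only; the restore logic itself is elided):

```typescript
import * as core from "@actions/core";

// Sketch: the error flow behind the two tests above.
async function run(): Promise<void> {
    try {
        // Each of these throws "Input required and not supplied: <name>"
        // when the corresponding input is absent.
        const cachePath = core.getInput("path", { required: true });
        const primaryKey = core.getInput("key", { required: true });
        core.debug(`Restoring ${cachePath} with key ${primaryKey}`);
        // ... restore logic elided ...
    } catch (error) {
        core.setFailed((error as Error).message);
    }
}
```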

__tests__/restore.test.ts

@@ -1,403 +0,0 @@
import * as core from "@actions/core";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/restore";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
return path.resolve(filePath);
});
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isExactKeyMatch(key, cacheResult);
}
);
jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isValidEvent();
});
jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getSupportedEvents();
});
});
beforeEach(() => {
process.env[Events.Key] = Events.Push;
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
});
test("restore with invalid event outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with no path should fail", async () => {
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: path"
);
});
test("restore with no key", async () => {
testUtils.setInput(Inputs.Path, "node_modules");
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
"Input required and not supplied: key"
);
});
test("restore with too many keys should fail", async () => {
const key = "node-test";
const restoreKeys = [...Array(20).keys()].map(x => x.toString());
testUtils.setInputs({
path: "node_modules",
key,
restoreKeys
});
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: Keys are limited to a maximum of 10.`
);
});
test("restore with large key should fail", async () => {
const key = "foo".repeat(512); // Over the 512 character limit
testUtils.setInputs({
path: "node_modules",
key
});
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot be larger than 512 characters.`
);
});
test("restore with invalid key should fail", async () => {
const key = "comma,comma";
testUtils.setInputs({
path: "node_modules",
key
});
const failedMock = jest.spyOn(core, "setFailed");
await run();
expect(failedMock).toHaveBeenCalledWith(
`Key Validation Error: ${key} cannot contain commas.`
);
});
test("restore with no cache found", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
key
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
return Promise.resolve(null);
});
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}.`
);
});
test("restore with server error should fail", async () => {
const key = "node-test";
testUtils.setInputs({
path: "node_modules",
key
});
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
throw new Error("HTTP Error Occurred");
});
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with restore keys and no cache found", async () => {
const key = "node-test";
const restoreKey = "node-";
testUtils.setInputs({
path: "node_modules",
key,
restoreKeys: [restoreKey]
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
clientMock.mockImplementation(() => {
return Promise.resolve(null);
});
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(failedMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledWith(
`Cache not found for input keys: ${key}, ${restoreKey}.`
);
});
test("restore with cache found", async () => {
const key = "node-test";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 142;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with a pull request event and cache found", async () => {
const key = "node-test";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key
});
process.env[Events.Key] = Events.PullRequest;
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: key,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 62915000;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with cache found for restore key", async () => {
const key = "node-test";
const restoreKey = "node-";
const cachePath = path.resolve("node_modules");
testUtils.setInputs({
path: "node_modules",
key,
restoreKeys: [restoreKey]
});
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: restoreKey,
scope: "refs/heads/master",
archiveLocation: "www.actionscache.test/download"
};
const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry");
getCacheMock.mockImplementation(() => {
return Promise.resolve(cacheEntry);
});
const tempPath = "/foo/bar";
const createTempDirectoryMock = jest.spyOn(
actionUtils,
"createTempDirectory"
);
createTempDirectoryMock.mockImplementation(() => {
return Promise.resolve(tempPath);
});
const archivePath = path.join(tempPath, "cache.tgz");
const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
const fileSize = 142;
const getArchiveFileSizeMock = jest
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith(
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(infoMock).toHaveBeenCalledWith(
`Cache restored from key: ${restoreKey}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
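
The validation tests above fix three rules: at most 10 keys in total, no key longer than 512 characters, and no commas in a key. A sketch of that validation, inferred from the expected error messages (not the verbatim source):

```typescript
// Sketch of the key validation implied by the tests above. Returns the
// error message to pass to setFailed, or undefined if all keys are valid.
function validateKeys(keys: string[]): string | undefined {
    if (keys.length > 10) {
        return "Key Validation Error: Keys are limited to a maximum of 10.";
    }
    for (const key of keys) {
        if (key.length > 512) {
            return `Key Validation Error: ${key} cannot be larger than 512 characters.`;
        }
        if (key.includes(",")) {
            return `Key Validation Error: ${key} cannot contain commas.`;
        }
    }
    return undefined;
}
```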

__tests__/save.test.ts

@@ -1,378 +0,0 @@
import * as core from "@actions/core";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/save";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("@actions/core");
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
return jest.requireActual("@actions/core").getInput(name, options);
});
jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => {
return jest.requireActual("../src/utils/actionUtils").getCacheState();
});
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
return jest
.requireActual("../src/utils/actionUtils")
.isExactKeyMatch(key, cacheResult);
}
);
jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isValidEvent();
});
jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getSupportedEvents();
});
jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
return path.resolve(filePath);
});
jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
return Promise.resolve("/foo/bar");
});
});
beforeEach(() => {
process.env[Events.Key] = Events.Push;
});
afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
});
test("save with invalid event outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const invalidEvent = "commit_comment";
process.env[Events.Key] = invalidEvent;
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with no primary key in state outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return "";
});
await run();
expect(logWarningMock).toHaveBeenCalledWith(
`Error retrieving key from state.`
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with exact match returns early", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: primaryKey,
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const createTarMock = jest.spyOn(tar, "createTar");
await run();
expect(infoMock).toHaveBeenCalledWith(
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with missing input outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
await run();
expect(logWarningMock).toHaveBeenCalledWith(
"Input required and not supplied: path"
);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with large cache outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath);
const createTarMock = jest.spyOn(tar, "createTar");
const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
return cacheSize;
});
await run();
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith(
"Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with reserve cache failure outputs warning", async () => {
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(-1);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
expect(infoMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with server error outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest
.spyOn(cacheHttpClient, "saveCache")
.mockImplementationOnce(() => {
throw new Error("HTTP Error Occurred");
});
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with valid inputs uploads a cache", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
expect(failedMock).toHaveBeenCalledTimes(0);
});
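
Taken together, the save tests above trace one pipeline: read the primary key and the restored entry from state, return early on an exact key match, reserve a cache ID, create the tar archive, then upload. A condensed sketch of that ordering; the `declare`d collaborators stand in for the modules the tests mock, and their signatures are assumptions:

```typescript
import * as core from "@actions/core";
import * as path from "path";

// Stand-ins for the mocked modules (cacheHttpClient, tar, actionUtils);
// signatures are assumed for illustration.
declare function getCacheState(): { cacheKey?: string } | undefined;
declare function isExactKeyMatch(key: string, entry?: { cacheKey?: string }): boolean;
declare function reserveCache(key: string): Promise<number>;
declare function createTempDirectory(): Promise<string>;
declare function createTar(archivePath: string, sourceDirectory: string): Promise<void>;
declare function saveCache(cacheId: number, archivePath: string): Promise<void>;
declare function logWarning(message: string): void;

// Condensed sketch of the ordering the save tests above assert.
async function saveRun(cachePath: string): Promise<void> {
    const restoredEntry = getCacheState();
    const primaryKey = core.getState("CACHE_KEY");
    if (!primaryKey) {
        logWarning("Error retrieving key from state.");
        return;
    }
    if (isExactKeyMatch(primaryKey, restoredEntry)) {
        core.info(
            `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
        );
        return; // nothing new to upload
    }
    const cacheId = await reserveCache(primaryKey);
    if (cacheId === -1) {
        core.info(
            `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
        );
        return; // another job won the reservation
    }
    const archivePath = path.join(await createTempDirectory(), "cache.tgz");
    await createTar(archivePath, cachePath);
    await saveCache(cacheId, archivePath);
}
```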

__tests__/tar.test.ts

@@ -1,58 +0,0 @@
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as tar from "../src/tar";
jest.mock("@actions/exec");
jest.mock("@actions/io");
beforeAll(() => {
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
});
});
test("extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const archivePath = "cache.tar";
const targetDirectory = "~/.npm/cache";
await tar.extractTar(archivePath, targetDirectory);
expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
const IS_WINDOWS = process.platform === "win32";
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-xz",
"-f",
archivePath,
"-C",
targetDirectory
]);
});
test("create tar", async () => {
const execMock = jest.spyOn(exec, "exec");
const archivePath = "cache.tar";
const sourceDirectory = "~/.npm/cache";
await tar.createTar(archivePath, sourceDirectory);
const IS_WINDOWS = process.platform === "win32";
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-cz",
"-f",
archivePath,
"-C",
sourceDirectory,
"."
]);
});

dist/restore/index.js (vendored, 390 lines changed)

@@ -1496,69 +1496,41 @@ const fs = __importStar(__webpack_require__(747));
const Handlers_1 = __webpack_require__(941);
const HttpClient_1 = __webpack_require__(874);
const RestClient_1 = __webpack_require__(105);
const utils = __importStar(__webpack_require__(443));
function isSuccessStatusCode(statusCode) {
return statusCode >= 200 && statusCode < 300;
}
function isRetryableStatusCode(statusCode) {
const retryableStatusCodes = [
HttpClient_1.HttpCodes.BadGateway,
HttpClient_1.HttpCodes.ServiceUnavailable,
HttpClient_1.HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl() {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
"").replace("pipelines", "artifactcache");
if (!baseUrl) {
throw new Error("Cache Service Url not found, unable to restore cache.");
}
core.debug(`Cache Url: ${baseUrl}`);
return `${baseUrl}_apis/artifactcache/`;
}
function createAcceptHeader(type, apiVersion) {
return `${type};api-version=${apiVersion}`;
}
function getRequestOptions() {
const requestOptions = {
acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
};
return requestOptions;
}
function createRestClient() {
function getCacheEntry(keys) {
return __awaiter(this, void 0, void 0, function* () {
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [
const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`;
const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [
bearerCredentialHandler
]);
}
function getCacheEntry(keys) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
const response = yield restClient.get(resource, getRequestOptions());
if (response.statusCode === 204) {
return null;
}
if (!isSuccessStatusCode(response.statusCode)) {
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation;
if (!cacheDownloadUrl) {
throw new Error("Cache not found.");
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
if (!cacheResult || !cacheResult.archiveLocation) {
throw new Error("Cache not found.");
}
return cacheResult;
});
}
exports.getCacheEntry = getCacheEntry;
function downloadCache(cacheEntry, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient_1.HttpClient("actions/cache");
const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation);
yield pipeResponseToStream(downloadResponse, stream);
});
}
exports.downloadCache = downloadCache;
function pipeResponseToStream(response, stream) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => {
@@ -1568,129 +1540,48 @@ function pipeResponseToStream(response, stream) {
});
});
}
function downloadCache(archiveLocation, archivePath) {
function saveCache(stream, key) {
return __awaiter(this, void 0, void 0, function* () {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient_1.HttpClient("actions/cache");
const downloadResponse = yield httpClient.get(archiveLocation);
yield pipeResponseToStream(downloadResponse, stream);
});
}
exports.downloadCache = downloadCache;
// Reserve Cache
function reserveCache(key) {
var _a, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
const reserveCacheRequest = {
key
};
const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions());
return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
});
}
exports.reserveCache = reserveCache;
function getContentRange(start, end) {
// Format: `bytes start-end/filesize
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
function uploadChunk(restClient, resourceUrl, data, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
const postUrl = cacheUrl + resource;
const restClient = new RestClient_1.RestClient("actions/cache", undefined, [
bearerCredentialHandler
]);
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
"Content-Type": "application/octet-stream"
};
const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
});
const response = yield uploadChunkRequest();
if (isSuccessStatusCode(response.statusCode)) {
return;
}
if (isRetryableStatusCode(response.statusCode)) {
core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
const retryResponse = yield uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.statusCode)) {
return;
}
}
throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`);
});
}
function parseEnvNumber(key) {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
function uploadFile(restClient, cacheId, archivePath) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const fd = fs.openSync(archivePath, "r");
const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY"), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE"), (_b !== null && _b !== void 0 ? _b : 32 * 1024 * 1024)); // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
yield uploadChunk(restClient, resourceUrl, chunk, start, end);
}
})));
}
finally {
fs.closeSync(fd);
}
return;
});
}
function commitCache(restClient, cacheId, filesize) {
return __awaiter(this, void 0, void 0, function* () {
const requestOptions = getRequestOptions();
const commitCacheRequest = { size: filesize };
return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
});
}
function saveCache(cacheId, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
core.debug("Upload cache");
yield uploadFile(restClient, cacheId, archivePath);
// Commit Cache
core.debug("Commiting cache");
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions);
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
core.info("Cache saved successfully");
});
}
exports.saveCache = saveCache;
function getRequestOptions() {
const requestOptions = {
acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
};
return requestOptions;
}
function createAcceptHeader(type, apiVersion) {
return `${type};api-version=${apiVersion}`;
}
function getCacheUrl() {
// Ideally we just use ACTIONS_CACHE_URL
let cacheUrl = (process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
"").replace("pipelines", "artifactcache");
if (!cacheUrl) {
throw new Error("Cache Service Url not found, unable to restore cache.");
}
core.debug(`Cache Url: ${cacheUrl}`);
return cacheUrl;
}
/***/ }),
@@ -2248,7 +2139,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const io = __importStar(__webpack_require__(1));
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
const uuidV4 = __importStar(__webpack_require__(826));
@@ -2280,10 +2170,6 @@ function createTempDirectory() {
});
}
exports.createTempDirectory = createTempDirectory;
function getArchiveFileSize(path) {
return fs.statSync(path).size;
}
exports.getArchiveFileSize = getArchiveFileSize;
function isExactKeyMatch(key, cacheResult) {
return !!(cacheResult &&
cacheResult.cacheKey &&
@@ -2292,14 +2178,6 @@ function isExactKeyMatch(key, cacheResult) {
}) === 0);
}
exports.isExactKeyMatch = isExactKeyMatch;
function setCacheState(state) {
core.saveState(constants_1.State.CacheResult, JSON.stringify(state));
}
exports.setCacheState = setCacheState;
function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
}
exports.setCacheHitOutput = setCacheHitOutput;
function setOutputAndState(key, cacheResult) {
setCacheHitOutput(isExactKeyMatch(key, cacheResult));
// Store the cache result if it exists
@@ -2309,17 +2187,17 @@ exports.setOutputAndState = setOutputAndState;
function getCacheState() {
const stateData = core.getState(constants_1.State.CacheResult);
core.debug(`State: ${stateData}`);
if (stateData) {
return JSON.parse(stateData);
}
return undefined;
return (stateData && JSON.parse(stateData));
}
exports.getCacheState = getCacheState;
function logWarning(message) {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
function setCacheState(state) {
core.saveState(constants_1.State.CacheResult, JSON.stringify(state));
}
exports.logWarning = logWarning;
exports.setCacheState = setCacheState;
function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
}
exports.setCacheHitOutput = setCacheHitOutput;
function resolvePath(filePath) {
if (filePath[0] === "~") {
const home = os.homedir();
@@ -2331,18 +2209,6 @@ function resolvePath(filePath) {
return path.resolve(filePath);
}
exports.resolvePath = resolvePath;
function getSupportedEvents() {
return [constants_1.Events.Push, constants_1.Events.PullRequest];
}
exports.getSupportedEvents = getSupportedEvents;
// Currently the cache token is only authorized for push and pull_request events
// All other events will fail when reading and saving the cache
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
function isValidEvent() {
const githubEvent = process.env[constants_1.Events.Key] || "";
return getSupportedEvents().includes(githubEvent);
}
exports.isValidEvent = isValidEvent;
/***/ }),
@@ -2970,25 +2836,19 @@ function isUnixExecutable(stats) {
Object.defineProperty(exports, "__esModule", { value: true });
var Inputs;
(function (Inputs) {
Inputs["Key"] = "key";
Inputs["Path"] = "path";
Inputs["RestoreKeys"] = "restore-keys";
Inputs.Key = "key";
Inputs.Path = "path";
Inputs.RestoreKeys = "restore-keys";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
var Outputs;
(function (Outputs) {
Outputs["CacheHit"] = "cache-hit";
Outputs.CacheHit = "cache-hit";
})(Outputs = exports.Outputs || (exports.Outputs = {}));
var State;
(function (State) {
State["CacheKey"] = "CACHE_KEY";
State["CacheResult"] = "CACHE_RESULT";
State.CacheKey = "CACHE_KEY";
State.CacheResult = "CACHE_RESULT";
})(State = exports.State || (exports.State = {}));
var Events;
(function (Events) {
Events["Key"] = "GITHUB_EVENT_NAME";
Events["Push"] = "push";
Events["PullRequest"] = "pull_request";
})(Events = exports.Events || (exports.Events = {}));
/***/ }),
@@ -3097,30 +2957,22 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
const cacheHttpClient = __importStar(__webpack_require__(154));
const constants_1 = __webpack_require__(694);
const tar_1 = __webpack_require__(943);
const utils = __importStar(__webpack_require__(443));
function run() {
var _a;
return __awaiter(this, void 0, void 0, function* () {
try {
// Validate inputs, this can cause task failure
if (!utils.isValidEvent()) {
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`);
return;
}
const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true }));
let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true }));
core.debug(`Cache Path: ${cachePath}`);
const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true });
core.saveState(constants_1.State.CacheKey, primaryKey);
const restoreKeys = core
.getInput(constants_1.Inputs.RestoreKeys)
.split("\n")
.filter(x => x !== "");
const restoreKeys = core.getInput(constants_1.Inputs.RestoreKeys).split("\n");
const keys = [primaryKey, ...restoreKeys];
core.debug("Resolved Keys:");
core.debug(JSON.stringify(keys));
@@ -3141,25 +2993,38 @@ function run() {
}
try {
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys);
if (!((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) {
core.info(`Cache not found for input keys: ${keys.join(", ")}.`);
if (!cacheEntry) {
core.info(`Cache not found for input keys: ${JSON.stringify(keys)}.`);
return;
}
const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
core.debug(`Archive Path: ${archivePath}`);
// Store the cache result
utils.setCacheState(cacheEntry);
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath);
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, cachePath);
yield cacheHttpClient.downloadCache(cacheEntry, archivePath);
io.mkdirP(cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const args = ["-xz"];
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
args.push("--force-local");
archivePath = archivePath.replace(/\\/g, "/");
cachePath = cachePath.replace(/\\/g, "/");
}
args.push(...["-f", archivePath, "-C", cachePath]);
const tarPath = yield io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
const archiveFileSize = fs.statSync(archivePath).size;
core.debug(`File Size: ${archiveFileSize}`);
yield exec_1.exec(`"${tarPath}"`, args);
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`);
}
catch (error) {
utils.logWarning(error.message);
core.warning(error.message);
utils.setCacheHitOutput(false);
}
}
@@ -5249,79 +5114,6 @@ var personalaccesstoken_1 = __webpack_require__(327);
exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler;
/***/ }),
/***/ 943:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const fs_1 = __webpack_require__(747);
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (fs_1.existsSync(systemTar)) {
return systemTar;
}
}
return yield io.which("tar", true);
});
}
function execTar(args) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
yield exec_1.exec(`"${yield getTarPath()}"`, args);
}
catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
}
throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`);
}
});
}
function extractTar(archivePath, targetDirectory) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
yield io.mkdirP(targetDirectory);
const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
yield execTar(args);
});
}
exports.extractTar = extractTar;
function createTar(archivePath, sourceDirectory) {
return __awaiter(this, void 0, void 0, function* () {
const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
yield execTar(args);
});
}
exports.createTar = createTar;
/***/ }),
/***/ 986:
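
One side of the bundle above uploads the archive in 32 MB chunks from a small pool of parallel workers, each chunk tagged with an inclusive `Content-Range`. The range arithmetic in isolation, as a worked sketch (constants mirror the defaults in the code above):

```typescript
// Worked sketch of the chunk math from uploadFile/getContentRange above.
const MAX_CHUNK_SIZE = 32 * 1024 * 1024; // 32 MB, the default above

function getContentRange(start: number, end: number): string {
    // start and end are inclusive; the total size is sent as "*"
    return `bytes ${start}-${end}/*`;
}

function chunkRanges(fileSize: number): Array<{ start: number; end: number }> {
    const ranges: Array<{ start: number; end: number }> = [];
    for (let offset = 0; offset < fileSize; offset += MAX_CHUNK_SIZE) {
        const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
        ranges.push({ start: offset, end: offset + chunkSize - 1 });
    }
    return ranges;
}

// For example, a 70 MB archive (73400320 B) yields three ranges:
// bytes 0-33554431/*, bytes 33554432-67108863/*, bytes 67108864-73400319/*.
```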

dist/save/index.js (vendored, 392 lines changed)

@@ -1496,69 +1496,41 @@ const fs = __importStar(__webpack_require__(747));
const Handlers_1 = __webpack_require__(941);
const HttpClient_1 = __webpack_require__(874);
const RestClient_1 = __webpack_require__(105);
const utils = __importStar(__webpack_require__(443));
function isSuccessStatusCode(statusCode) {
return statusCode >= 200 && statusCode < 300;
}
function isRetryableStatusCode(statusCode) {
const retryableStatusCodes = [
HttpClient_1.HttpCodes.BadGateway,
HttpClient_1.HttpCodes.ServiceUnavailable,
HttpClient_1.HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl() {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
"").replace("pipelines", "artifactcache");
if (!baseUrl) {
throw new Error("Cache Service Url not found, unable to restore cache.");
}
core.debug(`Cache Url: ${baseUrl}`);
return `${baseUrl}_apis/artifactcache/`;
}
function createAcceptHeader(type, apiVersion) {
return `${type};api-version=${apiVersion}`;
}
function getRequestOptions() {
const requestOptions = {
acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
};
return requestOptions;
}
function createRestClient() {
function getCacheEntry(keys) {
return __awaiter(this, void 0, void 0, function* () {
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [
const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`;
const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [
bearerCredentialHandler
]);
}
function getCacheEntry(keys) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
const response = yield restClient.get(resource, getRequestOptions());
if (response.statusCode === 204) {
return null;
}
if (!isSuccessStatusCode(response.statusCode)) {
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? void 0 : _a.archiveLocation;
if (!cacheDownloadUrl) {
throw new Error("Cache not found.");
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
if (!cacheResult || !cacheResult.archiveLocation) {
throw new Error("Cache not found.");
}
return cacheResult;
});
}
exports.getCacheEntry = getCacheEntry;
function downloadCache(cacheEntry, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient_1.HttpClient("actions/cache");
const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation);
yield pipeResponseToStream(downloadResponse, stream);
});
}
exports.downloadCache = downloadCache;
function pipeResponseToStream(response, stream) {
return __awaiter(this, void 0, void 0, function* () {
return new Promise(resolve => {
@ -1568,129 +1540,48 @@ function pipeResponseToStream(response, stream) {
});
});
}
function downloadCache(archiveLocation, archivePath) {
function saveCache(stream, key) {
return __awaiter(this, void 0, void 0, function* () {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient_1.HttpClient("actions/cache");
const downloadResponse = yield httpClient.get(archiveLocation);
yield pipeResponseToStream(downloadResponse, stream);
});
}
exports.downloadCache = downloadCache;
// Reserve Cache
function reserveCache(key) {
var _a, _b, _c;
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
const reserveCacheRequest = {
key
};
const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions());
return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? _c : -1);
});
}
exports.reserveCache = reserveCache;
function getContentRange(start, end) {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
function uploadChunk(restClient, resourceUrl, data, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token);
const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
const postUrl = cacheUrl + resource;
const restClient = new RestClient_1.RestClient("actions/cache", undefined, [
bearerCredentialHandler
]);
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
"Content-Type": "application/octet-stream"
};
const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
});
const response = yield uploadChunkRequest();
if (isSuccessStatusCode(response.statusCode)) {
return;
}
if (isRetryableStatusCode(response.statusCode)) {
core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
const retryResponse = yield uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.statusCode)) {
return;
}
}
throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`);
});
}
function parseEnvNumber(key) {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
function uploadFile(restClient, cacheId, archivePath) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const fd = fs.openSync(archivePath, "r");
const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY"), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE"), (_b !== null && _b !== void 0 ? _b : 32 * 1024 * 1024)); // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
yield uploadChunk(restClient, resourceUrl, chunk, start, end);
}
})));
}
finally {
fs.closeSync(fd);
}
return;
});
}
function commitCache(restClient, cacheId, filesize) {
return __awaiter(this, void 0, void 0, function* () {
const requestOptions = getRequestOptions();
const commitCacheRequest = { size: filesize };
return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
});
}
function saveCache(cacheId, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
core.debug("Upload cache");
yield uploadFile(restClient, cacheId, archivePath);
// Commit Cache
core.debug("Committing cache");
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions);
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
core.info("Cache saved successfully");
});
}
exports.saveCache = saveCache;
function getRequestOptions() {
const requestOptions = {
acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
};
return requestOptions;
}
function createAcceptHeader(type, apiVersion) {
return `${type};api-version=${apiVersion}`;
}
function getCacheUrl() {
// Ideally we just use ACTIONS_CACHE_URL
let cacheUrl = (process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
"").replace("pipelines", "artifactcache");
if (!cacheUrl) {
throw new Error("Cache Service Url not found, unable to restore cache.");
}
core.debug(`Cache Url: ${cacheUrl}`);
return cacheUrl;
}
/***/ }),
@ -2248,7 +2139,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const io = __importStar(__webpack_require__(1));
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
const uuidV4 = __importStar(__webpack_require__(826));
@ -2280,10 +2170,6 @@ function createTempDirectory() {
});
}
exports.createTempDirectory = createTempDirectory;
function getArchiveFileSize(path) {
return fs.statSync(path).size;
}
exports.getArchiveFileSize = getArchiveFileSize;
function isExactKeyMatch(key, cacheResult) {
return !!(cacheResult &&
cacheResult.cacheKey &&
@ -2292,14 +2178,6 @@ function isExactKeyMatch(key, cacheResult) {
}) === 0);
}
exports.isExactKeyMatch = isExactKeyMatch;
function setCacheState(state) {
core.saveState(constants_1.State.CacheResult, JSON.stringify(state));
}
exports.setCacheState = setCacheState;
function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
}
exports.setCacheHitOutput = setCacheHitOutput;
function setOutputAndState(key, cacheResult) {
setCacheHitOutput(isExactKeyMatch(key, cacheResult));
// Store the cache result if it exists
@ -2309,17 +2187,17 @@ exports.setOutputAndState = setOutputAndState;
function getCacheState() {
const stateData = core.getState(constants_1.State.CacheResult);
core.debug(`State: ${stateData}`);
if (stateData) {
return JSON.parse(stateData);
}
return undefined;
return (stateData && JSON.parse(stateData));
}
exports.getCacheState = getCacheState;
function logWarning(message) {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
function setCacheState(state) {
core.saveState(constants_1.State.CacheResult, JSON.stringify(state));
}
exports.logWarning = logWarning;
exports.setCacheState = setCacheState;
function setCacheHitOutput(isCacheHit) {
core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString());
}
exports.setCacheHitOutput = setCacheHitOutput;
function resolvePath(filePath) {
if (filePath[0] === "~") {
const home = os.homedir();
@ -2331,18 +2209,6 @@ function resolvePath(filePath) {
return path.resolve(filePath);
}
exports.resolvePath = resolvePath;
function getSupportedEvents() {
return [constants_1.Events.Push, constants_1.Events.PullRequest];
}
exports.getSupportedEvents = getSupportedEvents;
// Currently the cache token is only authorized for push and pull_request events
// All other events will fail when reading and saving the cache
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
function isValidEvent() {
const githubEvent = process.env[constants_1.Events.Key] || "";
return getSupportedEvents().includes(githubEvent);
}
exports.isValidEvent = isValidEvent;
/***/ }),
@ -2985,55 +2851,56 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const fs = __importStar(__webpack_require__(747));
const path = __importStar(__webpack_require__(622));
const cacheHttpClient = __importStar(__webpack_require__(154));
const constants_1 = __webpack_require__(694);
const tar_1 = __webpack_require__(943);
const utils = __importStar(__webpack_require__(443));
function run() {
return __awaiter(this, void 0, void 0, function* () {
try {
if (!utils.isValidEvent()) {
utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`);
return;
}
const state = utils.getCacheState();
// Inputs are re-evaluated before the post action, so we want the original key used for restore
const primaryKey = core.getState(constants_1.State.CacheKey);
if (!primaryKey) {
utils.logWarning(`Error retrieving key from state.`);
core.warning(`Error retrieving key from state.`);
return;
}
if (utils.isExactKeyMatch(primaryKey, state)) {
core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`);
return;
}
core.debug("Reserving Cache");
const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
if (cacheId == -1) {
core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
return;
}
core.debug(`Cache ID: ${cacheId}`);
const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true }));
let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true }));
core.debug(`Cache Path: ${cachePath}`);
const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
core.debug(`Archive Path: ${archivePath}`);
yield tar_1.createTar(archivePath, cachePath);
const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories to add into the archive]
const args = ["-cz"];
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
args.push("--force-local");
archivePath = archivePath.replace(/\\/g, "/");
cachePath = cachePath.replace(/\\/g, "/");
}
args.push(...["-f", archivePath, "-C", cachePath, "."]);
const tarPath = yield io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
yield exec_1.exec(`"${tarPath}"`, args);
const fileSizeLimit = 200 * 1024 * 1024; // 200MB
const archiveFileSize = fs.statSync(archivePath).size;
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`);
core.warning(`Cache size of ${archiveFileSize} bytes is over the 200MB limit, not saving cache.`);
return;
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath);
const stream = fs.createReadStream(archivePath);
yield cacheHttpClient.saveCache(stream, primaryKey);
}
catch (error) {
utils.logWarning(error.message);
core.warning(error.message);
}
});
}
@ -3051,25 +2918,19 @@ exports.default = run;
Object.defineProperty(exports, "__esModule", { value: true });
var Inputs;
(function (Inputs) {
Inputs["Key"] = "key";
Inputs["Path"] = "path";
Inputs["RestoreKeys"] = "restore-keys";
Inputs.Key = "key";
Inputs.Path = "path";
Inputs.RestoreKeys = "restore-keys";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
var Outputs;
(function (Outputs) {
Outputs["CacheHit"] = "cache-hit";
Outputs.CacheHit = "cache-hit";
})(Outputs = exports.Outputs || (exports.Outputs = {}));
var State;
(function (State) {
State["CacheKey"] = "CACHE_KEY";
State["CacheResult"] = "CACHE_RESULT";
State.CacheKey = "CACHE_KEY";
State.CacheResult = "CACHE_RESULT";
})(State = exports.State || (exports.State = {}));
var Events;
(function (Events) {
Events["Key"] = "GITHUB_EVENT_NAME";
Events["Push"] = "push";
Events["PullRequest"] = "pull_request";
})(Events = exports.Events || (exports.Events = {}));
/***/ }),
@ -5230,79 +5091,6 @@ var personalaccesstoken_1 = __webpack_require__(327);
exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler;
/***/ }),
/***/ 943:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const fs_1 = __webpack_require__(747);
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (fs_1.existsSync(systemTar)) {
return systemTar;
}
}
return yield io.which("tar", true);
});
}
function execTar(args) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
yield exec_1.exec(`"${yield getTarPath()}"`, args);
}
catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
}
throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`);
}
});
}
function extractTar(archivePath, targetDirectory) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
yield io.mkdirP(targetDirectory);
const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
yield execTar(args);
});
}
exports.extractTar = extractTar;
function createTar(archivePath, sourceDirectory) {
return __awaiter(this, void 0, void 0, function* () {
const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
yield execTar(args);
});
}
exports.createTar = createTar;
/***/ }),
/***/ 986:

View File

@ -1,24 +1,43 @@
# Examples
- [C# - NuGet](#c---nuget)
- [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules)
- [Java - Gradle](#java---gradle)
- [Java - Maven](#java---maven)
- [Node - npm](#node---npm)
- [Node - Yarn](#node---yarn)
- [PHP - Composer](#php---composer)
- [Python - pip](#python---pip)
- [Ruby - Gem](#ruby---gem)
- [Rust - Cargo](#rust---cargo)
- [C# - Nuget](#c---nuget)
- [Java - Gradle](#java---gradle)
- [Java - Maven](#java---maven)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Ruby - Gem](#ruby---gem)
- [Go - Modules](#go---modules)
- [Elixir - Mix](#elixir---mix)
- [Rust - Cargo](#rust---cargo)
## C# - NuGet
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
## Node - npm
```yaml
- uses: actions/cache@v1
- uses: actions/cache@preview
with:
path: node_modules
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
## Node - Yarn
```yaml
- uses: actions/cache@preview
with:
path: ~/.cache/yarn
key: ${{ runner.os }}-yarn-${{ hashFiles(format('{0}{1}', github.workspace, '/yarn.lock')) }}
restore-keys: |
${{ runner.os }}-yarn-
```
## C# - Nuget
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
```yaml
- uses: actions/cache@preview
with:
path: ~/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
@ -26,46 +45,10 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
${{ runner.os }}-nuget-
```
Depending on the environment, huge packages might be pre-installed in the global cache folder.
If you do not want to include them, consider moving the cache folder as shown below.
>Note: This workflow does not work for projects that require files to be placed in the user profile package folder
```yaml
env:
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
- uses: actions/cache@v1
with:
path: ${{ github.workspace }}/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
## Elixir - Mix
```yaml
- uses: actions/cache@v1
with:
path: deps
key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
restore-keys: |
${{ runner.os }}-mix-
```
## Go - Modules
```yaml
- uses: actions/cache@v1
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
```
## Java - Gradle
```yaml
- uses: actions/cache@v1
- uses: actions/cache@preview
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
@ -76,7 +59,7 @@ steps:
## Java - Maven
```yaml
- uses: actions/cache@v1
- uses: actions/cache@preview
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
@ -84,189 +67,10 @@ steps:
${{ runner.os }}-maven-
```
## Node - npm
For npm, cache files are stored in `~/.npm` on POSIX systems, or `%AppData%/npm-cache` on Windows. See https://docs.npmjs.com/cli/cache#cache
>Note: It is not recommended to cache `node_modules`, as it can break across Node versions and won't work with `npm ci`
### macOS and Ubuntu
```yaml
- uses: actions/cache@v1
with:
path: ~/.npm
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
### Windows
```yaml
- uses: actions/cache@v1
with:
path: ~\AppData\Roaming\npm-cache
key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
### Using multiple systems and `npm config`
```yaml
- name: Get npm cache directory
id: npm-cache
run: |
echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
with:
path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
```
## Node - Yarn
The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info.
```yaml
- name: Get yarn cache
id: yarn-cache
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v1
with:
path: ${{ steps.yarn-cache.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
```
## PHP - Composer
```yaml
- name: Get Composer Cache Directory
id: composer-cache
run: |
echo "::set-output name=dir::$(composer config cache-files-dir)"
- uses: actions/cache@v1
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: |
${{ runner.os }}-composer-
```
## Python - pip
For pip, the cache directory will vary by OS. See https://pip.pypa.io/en/stable/reference/pip_install/#caching
Locations:
- Ubuntu: `~/.cache/pip`
- Windows: `~\AppData\Local\pip\Cache`
- macOS: `~/Library/Caches/pip`
### Simple example
```yaml
- uses: actions/cache@v1
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
### Multiple OSes in a workflow
```yaml
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Caches/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\pip\Cache
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
### Using a script to get cache location
> Note: This uses an internal pip API and may not always work
```yaml
- name: Get pip cache
id: pip-cache
run: |
python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"
- uses: actions/cache@v1
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
```
## Ruby - Gem
```yaml
- uses: actions/cache@v1
with:
path: vendor/bundle
key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: |
${{ runner.os }}-gem-
```
When dependencies are installed later in the workflow, we must specify the same path for the bundler.
```yaml
- name: Bundle install
run: |
bundle config path vendor/bundle
bundle install --jobs 4 --retry 3
```
## Rust - Cargo
```yaml
- name: Cache cargo registry
uses: actions/cache@v1
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@v1
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@v1
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```
## Swift, Objective-C - Carthage
```yaml
- uses: actions/cache@v1
uses: actions/cache@preview
with:
path: Carthage
key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }}
@ -277,10 +81,62 @@ When dependencies are installed later in the workflow, we must specify the same
## Swift, Objective-C - CocoaPods
```yaml
- uses: actions/cache@v1
- uses: actions/cache@preview
with:
path: Pods
key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }}
restore-keys: |
${{ runner.os }}-pods-
```
## Ruby - Gem
```yaml
- uses: actions/cache@preview
with:
path: vendor/bundle
key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: |
${{ runner.os }}-gem-
```
## Go - Modules
```yaml
- uses: actions/cache@preview
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
```
## Elixir - Mix
```yaml
- uses: actions/cache@preview
with:
path: deps
key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}
restore-keys: |
${{ runner.os }}-mix-
```
## Rust - Cargo
```yaml
- name: Cache cargo registry
uses: actions/cache@preview
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo index
uses: actions/cache@preview
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}
- name: Cache cargo build
uses: actions/cache@preview
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```

View File

@ -1,23 +1,20 @@
require("nock").disableNetConnect();
module.exports = {
clearMocks: true,
moduleFileExtensions: ["js", "ts"],
testEnvironment: "node",
testMatch: ["**/*.test.ts"],
testRunner: "jest-circus/runner",
moduleFileExtensions: ['js', 'ts'],
testEnvironment: 'node',
testMatch: ['**/*.test.ts'],
testRunner: 'jest-circus/runner',
transform: {
"^.+\\.ts$": "ts-jest"
'^.+\\.ts$': 'ts-jest'
},
verbose: true
};
}
const processStdoutWrite = process.stdout.write.bind(process.stdout);
// eslint-disable-next-line @typescript-eslint/explicit-function-return-type
const processStdoutWrite = process.stdout.write.bind(process.stdout)
process.stdout.write = (str, encoding, cb) => {
// Core library will directly call process.stdout.write for commands
// We don't want :: commands to be executed by the runner during tests
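// e.g. core.setOutput("cache-hit", "true") writes "::set-output name=cache-hit::true";
// dropping lines that start with "::" keeps tests from triggering real runner commands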
if (!str.match(/^::/)) {
return processStdoutWrite(str, encoding, cb);
}
};
}

package-lock.json generated

File diff suppressed because it is too large

View File

@ -1,13 +1,12 @@
{
"name": "cache",
"version": "1.1.0",
"version": "0.0.2",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
"scripts": {
"build": "tsc",
"test": "tsc --noEmit && jest --coverage",
"lint": "eslint **/*.ts --cache",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/"
@ -32,22 +31,13 @@
},
"devDependencies": {
"@types/jest": "^24.0.13",
"@types/nock": "^11.1.0",
"@types/node": "^12.0.4",
"@types/uuid": "^3.4.5",
"@typescript-eslint/eslint-plugin": "^2.7.0",
"@typescript-eslint/parser": "^2.7.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.6.0",
"eslint-config-prettier": "^6.5.0",
"eslint-plugin-import": "^2.18.2",
"eslint-plugin-jest": "^23.0.3",
"eslint-plugin-prettier": "^3.1.1",
"jest": "^24.8.0",
"jest-circus": "^24.7.1",
"nock": "^11.7.0",
"prettier": "^1.19.1",
"prettier": "1.18.2",
"ts-jest": "^24.0.2",
"typescript": "^3.7.3"
"typescript": "^3.6.4"
}
}

View File

@ -1,77 +1,27 @@
import * as core from "@actions/core";
import * as fs from "fs";
import { BearerCredentialHandler } from "typed-rest-client/Handlers";
import { HttpClient, HttpCodes } from "typed-rest-client/HttpClient";
import { HttpClient } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import {
IRequestOptions,
RestClient,
IRestResponse
} from "typed-rest-client/RestClient";
import {
ArtifactCacheEntry,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";
import { RestClient, IRequestOptions } from "typed-rest-client/RestClient";
function isSuccessStatusCode(statusCode: number): boolean {
return statusCode >= 200 && statusCode < 300;
}
function isRetryableStatusCode(statusCode: number): boolean {
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl(): string {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = (
process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
""
).replace("pipelines", "artifactcache");
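// e.g. ACTIONS_RUNTIME_URL=https://pipelines.actions.githubusercontent.com/AbCd123/
// yields https://artifactcache.actions.githubusercontent.com/AbCd123/ (illustrative values, not real URLs)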
if (!baseUrl) {
throw new Error(
"Cache Service Url not found, unable to restore cache."
);
}
core.debug(`Cache Url: ${baseUrl}`);
return `${baseUrl}_apis/artifactcache/`;
}
function createAcceptHeader(type: string, apiVersion: string): string {
return `${type};api-version=${apiVersion}`;
}
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
acceptHeader: createAcceptHeader("application/json", "6.0-preview.1")
};
return requestOptions;
}
function createRestClient(): RestClient {
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
return new RestClient("actions/cache", getCacheApiUrl(), [
bearerCredentialHandler
]);
}
import { ArtifactCacheEntry } from "./contracts";
export async function getCacheEntry(
keys: string[]
): Promise<ArtifactCacheEntry | null> {
const restClient = createRestClient();
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(
keys.join(",")
)}`;
const restClient = new RestClient("actions/cache", cacheUrl, [
bearerCredentialHandler
]);
const response = await restClient.get<ArtifactCacheEntry>(
resource,
@ -80,21 +30,29 @@ export async function getCacheEntry(
if (response.statusCode === 204) {
return null;
}
if (!isSuccessStatusCode(response.statusCode)) {
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
const cacheResult = response.result;
const cacheDownloadUrl = cacheResult?.archiveLocation;
if (!cacheDownloadUrl) {
throw new Error("Cache not found.");
}
core.setSecret(cacheDownloadUrl);
core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult));
if (!cacheResult || !cacheResult.archiveLocation) {
throw new Error("Cache not found.");
}
return cacheResult;
}
export async function downloadCache(
cacheEntry: ArtifactCacheEntry,
archivePath: string
): Promise<void> {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache");
const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!);
await pipeResponseToStream(downloadResponse, stream);
}
async function pipeResponseToStream(
response: IHttpClientResponse,
stream: NodeJS.WritableStream
@ -106,188 +64,61 @@ async function pipeResponseToStream(
});
}
export async function downloadCache(
archiveLocation: string,
archivePath: string
): Promise<void> {
const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache");
const downloadResponse = await httpClient.get(archiveLocation);
await pipeResponseToStream(downloadResponse, stream);
}
export async function saveCache(stream: NodeJS.ReadableStream, key: string) {
const cacheUrl = getCacheUrl();
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
// Reserve Cache
export async function reserveCache(key: string): Promise<number> {
const restClient = createRestClient();
const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
const postUrl = cacheUrl + resource;
const reserveCacheRequest: ReserveCacheRequest = {
key
};
const response = await restClient.create<ReserveCacheResponse>(
"caches",
reserveCacheRequest,
getRequestOptions()
);
const restClient = new RestClient("actions/cache", undefined, [
bearerCredentialHandler
]);
return response?.result?.cacheId ?? -1;
}
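// Note: a cacheId of -1 signals the reservation was declined, typically because another
// job is already creating a cache for this key (src/save.ts bails out early in that case)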
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize`
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
async function uploadChunk(
restClient: RestClient,
resourceUrl: string,
data: NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
core.debug(
`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
);
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
"Content-Type": "application/octet-stream"
};
const uploadChunkRequest = async (): Promise<IRestResponse<void>> => {
return await restClient.uploadStream<void>(
"PATCH",
resourceUrl,
data,
const response = await restClient.uploadStream<void>(
"POST",
postUrl,
stream,
requestOptions
);
};
const response = await uploadChunkRequest();
if (isSuccessStatusCode(response.statusCode)) {
return;
}
if (isRetryableStatusCode(response.statusCode)) {
core.debug(
`Received ${response.statusCode}, retrying chunk at offset ${start}.`
);
const retryResponse = await uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.statusCode)) {
return;
}
}
throw new Error(
`Cache service responded with ${response.statusCode} during chunk upload.`
);
}
function parseEnvNumber(key: string): number | undefined {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
async function uploadFile(
restClient: RestClient,
cacheId: number,
archivePath: string
): Promise<void> {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const fd = fs.openSync(archivePath, "r");
const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE =
parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
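// e.g. an 80 MB (83,886,080 B) archive is uploaded as three PATCH chunks:
// bytes 0-33554431, 33554432-67108863 and 67108864-83886079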
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize = Math.min(
fileSize - offset,
MAX_CHUNK_SIZE
);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
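// start/end are claimed and offset advanced before any await, so the
// parallel workers never pick overlapping ranges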
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
await uploadChunk(
restClient,
resourceUrl,
chunk,
start,
end
);
}
})
);
} finally {
fs.closeSync(fd);
}
return;
}
async function commitCache(
restClient: RestClient,
cacheId: number,
filesize: number
): Promise<IRestResponse<void>> {
const requestOptions = getRequestOptions();
const commitCacheRequest: CommitCacheRequest = { size: filesize };
return await restClient.create(
`caches/${cacheId.toString()}`,
commitCacheRequest,
requestOptions
);
}
export async function saveCache(
cacheId: number,
archivePath: string
): Promise<void> {
const restClient = createRestClient();
core.debug("Upload cache");
await uploadFile(restClient, cacheId, archivePath);
// Commit Cache
core.debug("Committing cache");
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = await commitCache(
restClient,
cacheId,
cacheSize
);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
throw new Error(
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
);
if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`);
}
core.info("Cache saved successfully");
}
function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = {
acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
};
return requestOptions;
}
function createAcceptHeader(type: string, apiVersion: string): string {
return `${type};api-version=${apiVersion}`;
}
function getCacheUrl(): string {
// Ideally we just use ACTIONS_CACHE_URL
let cacheUrl: string = (
process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] ||
""
).replace("pipelines", "artifactcache");
if (!cacheUrl) {
throw new Error(
"Cache Service Url not found, unable to restore cache."
);
}
core.debug(`Cache Url: ${cacheUrl}`);
return cacheUrl;
}

View File

@ -1,20 +1,14 @@
export enum Inputs {
Key = "key",
Path = "path",
RestoreKeys = "restore-keys"
export namespace Inputs {
export const Key = "key";
export const Path = "path";
export const RestoreKeys = "restore-keys";
}
export enum Outputs {
CacheHit = "cache-hit"
export namespace Outputs {
export const CacheHit = "cache-hit";
}
export enum State {
CacheKey = "CACHE_KEY",
CacheResult = "CACHE_RESULT"
}
export enum Events {
Key = "GITHUB_EVENT_NAME",
Push = "push",
PullRequest = "pull_request"
export namespace State {
export const CacheKey = "CACHE_KEY";
export const CacheResult = "CACHE_RESULT";
}

src/contracts.d.ts vendored
View File

@ -4,16 +4,3 @@ export interface ArtifactCacheEntry {
creationTime?: string;
archiveLocation?: string;
}
export interface CommitCacheRequest {
size: number;
}
export interface ReserveCacheRequest {
key: string;
version?: string;
}
export interface ReserveCacheResponse {
cacheId: number;
}

View File

@ -1,25 +1,18 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as fs from "fs";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
import { Inputs, State } from "./constants";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
async function run() {
try {
// Validate inputs; this can cause task failure
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`
);
return;
}
const cachePath = utils.resolvePath(
let cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true })
);
core.debug(`Cache Path: ${cachePath}`);
@ -27,10 +20,7 @@ async function run(): Promise<void> {
const primaryKey = core.getInput(Inputs.Key, { required: true });
core.saveState(State.CacheKey, primaryKey);
const restoreKeys = core
.getInput(Inputs.RestoreKeys)
.split("\n")
.filter(x => x !== "");
const restoreKeys = core.getInput(Inputs.RestoreKeys).split("\n");
const keys = [primaryKey, ...restoreKeys];
core.debug("Resolved Keys:");
@ -60,14 +50,14 @@ async function run(): Promise<void> {
try {
const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
if (!cacheEntry?.archiveLocation) {
if (!cacheEntry) {
core.info(
`Cache not found for input keys: ${keys.join(", ")}.`
`Cache not found for input keys: ${JSON.stringify(keys)}.`
);
return;
}
const archivePath = path.join(
let archivePath = path.join(
await utils.createTempDirectory(),
"cache.tgz"
);
@ -77,19 +67,29 @@ async function run(): Promise<void> {
utils.setCacheState(cacheEntry);
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(
cacheEntry.archiveLocation,
archivePath
);
await cacheHttpClient.downloadCache(cacheEntry, archivePath);
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info(
`Cache Size: ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B)`
);
io.mkdirP(cachePath);
await extractTar(archivePath, cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories to add into the archive]
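// e.g. on Linux this composes: "tar" -xz -f /tmp/<temp>/cache.tgz -C /home/runner/.npm
// (illustrative paths; on Windows --force-local is added and "\" is rewritten to "/")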
const args = ["-xz"];
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
args.push("--force-local");
archivePath = archivePath.replace(/\\/g, "/");
cachePath = cachePath.replace(/\\/g, "/");
}
args.push(...["-f", archivePath, "-C", cachePath]);
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
const archiveFileSize = fs.statSync(archivePath).size;
core.debug(`File Size: ${archiveFileSize}`);
await exec(`"${tarPath}"`, args);
const isExactKeyMatch = utils.isExactKeyMatch(
primaryKey,
@ -101,7 +101,7 @@ async function run(): Promise<void> {
`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`
);
} catch (error) {
utils.logWarning(error.message);
core.warning(error.message);
utils.setCacheHitOutput(false);
}
} catch (error) {

View File

@ -1,29 +1,22 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as fs from "fs";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
import { Inputs, State } from "./constants";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
async function run() {
try {
if (!utils.isValidEvent()) {
utils.logWarning(
`Event Validation Error: The event type ${
process.env[Events.Key]
} is not supported. Only ${utils
.getSupportedEvents()
.join(", ")} events are supported at this time.`
);
return;
}
const state = utils.getCacheState();
// Inputs are re-evaluated before the post action, so we want the original key used for restore
const primaryKey = core.getState(State.CacheKey);
if (!primaryKey) {
utils.logWarning(`Error retrieving key from state.`);
core.warning(`Error retrieving key from state.`);
return;
}
@ -34,44 +27,47 @@ async function run(): Promise<void> {
return;
}
core.debug("Reserving Cache");
const cacheId = await cacheHttpClient.reserveCache(primaryKey);
if (cacheId == -1) {
core.info(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
return;
}
core.debug(`Cache ID: ${cacheId}`);
const cachePath = utils.resolvePath(
let cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true })
);
core.debug(`Cache Path: ${cachePath}`);
const archivePath = path.join(
let archivePath = path.join(
await utils.createTempDirectory(),
"cache.tgz"
);
core.debug(`Archive Path: ${archivePath}`);
await createTar(archivePath, cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories to add into the archive]
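// e.g. on Linux this composes: "tar" -cz -f /tmp/<temp>/cache.tgz -C /home/runner/.npm .
// (illustrative paths; the trailing "." archives the contents of cachePath itself)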
const args = ["-cz"];
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
args.push("--force-local");
archivePath = archivePath.replace(/\\/g, "/");
cachePath = cachePath.replace(/\\/g, "/");
}
const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
args.push(...["-f", archivePath, "-C", cachePath, "."]);
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
const fileSizeLimit = 200 * 1024 * 1024; // 200MB
const archiveFileSize = fs.statSync(archivePath).size;
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
utils.logWarning(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
core.warning(
`Cache size of ${archiveFileSize} bytes is over the 200MB limit, not saving cache.`
);
return;
}
core.debug(`Saving Cache (ID: ${cacheId})`);
await cacheHttpClient.saveCache(cacheId, archivePath);
const stream = fs.createReadStream(archivePath);
await cacheHttpClient.saveCache(stream, primaryKey);
} catch (error) {
utils.logWarning(error.message);
core.warning(error.message);
}
}

View File

@ -1,47 +0,0 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync } from "fs";
async function getTarPath(): Promise<string> {
// Explicitly use BSD Tar on Windows
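// (GNU tar can misread "C:\..." as a remote-host path, hence the --force-local flag used
// elsewhere in this diff; the tar.exe shipped in System32 on recent Windows is BSD tar)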
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (existsSync(systemTar)) {
return systemTar;
}
}
return await io.which("tar", true);
}
async function execTar(args: string[]): Promise<void> {
try {
await exec(`"${await getTarPath()}"`, args);
} catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(
`Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.`
);
}
throw new Error(`Tar failed with error: ${error?.message}`);
}
}
export async function extractTar(
archivePath: string,
targetDirectory: string
): Promise<void> {
// Create directory to extract tar into
await io.mkdirP(targetDirectory);
const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
await execTar(args);
}
export async function createTar(
archivePath: string,
sourceDirectory: string
): Promise<void> {
const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
await execTar(args);
}

View File

@ -1,11 +1,10 @@
import * as core from "@actions/core";
import * as io from "@actions/io";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as uuidV4 from "uuid/v4";
import { Events, Outputs, State } from "../constants";
import { Outputs, State } from "../constants";
import { ArtifactCacheEntry } from "../contracts";
// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
@ -33,10 +32,6 @@ export async function createTempDirectory(): Promise<string> {
return dest;
}
export function getArchiveFileSize(path: string): number {
return fs.statSync(path).size;
}
export function isExactKeyMatch(
key: string,
cacheResult?: ArtifactCacheEntry
@ -50,18 +45,10 @@ export function isExactKeyMatch(
);
}
export function setCacheState(state: ArtifactCacheEntry): void {
core.saveState(State.CacheResult, JSON.stringify(state));
}
export function setCacheHitOutput(isCacheHit: boolean): void {
core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}
export function setOutputAndState(
key: string,
cacheResult?: ArtifactCacheEntry
): void {
) {
setCacheHitOutput(isExactKeyMatch(key, cacheResult));
// Store the cache result if it exists
cacheResult && setCacheState(cacheResult);
@ -70,16 +57,15 @@ export function setOutputAndState(
export function getCacheState(): ArtifactCacheEntry | undefined {
const stateData = core.getState(State.CacheResult);
core.debug(`State: ${stateData}`);
if (stateData) {
return JSON.parse(stateData) as ArtifactCacheEntry;
return (stateData && JSON.parse(stateData)) as ArtifactCacheEntry;
}
return undefined;
export function setCacheState(state: ArtifactCacheEntry) {
core.saveState(State.CacheResult, JSON.stringify(state));
}
export function logWarning(message: string): void {
const warningPrefix = "[warning]";
core.info(`${warningPrefix}${message}`);
export function setCacheHitOutput(isCacheHit: boolean) {
core.setOutput(Outputs.CacheHit, isCacheHit.toString());
}
export function resolvePath(filePath: string): string {
@ -93,15 +79,3 @@ export function resolvePath(filePath: string): string {
return path.resolve(filePath);
}
export function getSupportedEvents(): string[] {
return [Events.Push, Events.PullRequest];
}
// Currently the cache token is only authorized for push and pull_request events
// All other events will fail when reading and saving the cache
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {
const githubEvent = process.env[Events.Key] || "";
return getSupportedEvents().includes(githubEvent);
}

View File

@ -1,29 +1,7 @@
import { Inputs } from "../constants";
// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67
function getInputName(name: string): string {
return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`;
}
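// e.g. getInputName("restore-keys") === "INPUT_RESTORE-KEYS", mirroring how the runner
// exposes workflow inputs to actions as environment variables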
export function setInput(name: string, value: string): void {
export function setInput(name: string, value: string) {
process.env[getInputName(name)] = value;
}
interface CacheInput {
path: string;
key: string;
restoreKeys?: string[];
}
export function setInputs(input: CacheInput): void {
setInput(Inputs.Path, input.path);
setInput(Inputs.Key, input.key);
input.restoreKeys &&
setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n"));
}
export function clearInputs(): void {
delete process.env[getInputName(Inputs.Path)];
delete process.env[getInputName(Inputs.Key)];
delete process.env[getInputName(Inputs.RestoreKeys)];
}