Mirror of https://github.com/actions/cache.git (synced 2025-06-25 11:51:11 +02:00)

Compare commits: v1.1.1...add-retrie (51 commits)
Commits (SHA1; author and date columns were not captured):
c8d75a8073, a8b61326cf, 25b1a139de, 6efe05572d, aced43a650, ce9276c90e,
9eb452c280, 75cd46ec0c, a5d9a3b1a6, 97f7baa910, 9ceee97d99, ccf9619480,
9f07ee13de, 1ed0c23029, 54626c4a4f, 48b62c1c52, 9bb13c71ec, 8b2a57849f,
f00dedfa6c, 12b87469d4, 52046d1409, 08438313d5, 7ccdf5c70d, 306f72536b,
4fa017f2b7, 78809b91d7, a4e3c3b64e, e5370355e6, 0e86d5c038, 2ba9edf492,
f15bc7a0d9, b6b8aa78d8, 272268544c, 64f8769515, 4a724707e9, f60097cd16,
eb78578266, 22d71e33ad, b13df3fa54, cae64ca3cd, af8651e0c5, 6c471ae9f6,
206172ea8e, 5833d5c131, 826785142a, 8e9c167fd7, e8230b28a9, 4944275b95,
78a4b2143b, 4dc4b4e758, 85aee6a487
@@ -12,5 +12,12 @@
         "plugin:prettier/recommended",
         "prettier/@typescript-eslint"
     ],
-    "plugins": ["@typescript-eslint", "jest"]
+    "plugins": ["@typescript-eslint", "simple-import-sort", "jest"],
+    "rules": {
+        "import/first": "error",
+        "import/newline-after-import": "error",
+        "import/no-duplicates": "error",
+        "simple-import-sort/sort": "error",
+        "sort-imports": "off"
+    }
 }
.github/workflows/codeql.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
+name: "Code Scanning - Action"
+
+on:
+  push:
+  schedule:
+    - cron: '0 0 * * 0'
+
+jobs:
+  CodeQL-Build:
+
+    strategy:
+      fail-fast: false
+
+
+    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v1
+        # Override language selection by uncommenting this and choosing your languages
+        # with:
+        #   languages: go, javascript, csharp, python, cpp, java
+
+      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
+      # If this step fails, then you should remove it and run the build manually (see below).
+      - name: Autobuild
+        uses: github/codeql-action/autobuild@v1
+
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v1
.github/workflows/workflow.yml (vendored, 119 changed lines)
@@ -13,42 +13,129 @@ on:
     - '**.md'
 
 jobs:
-  test:
-    name: Test on ${{ matrix.os }}
+  # Build and unit test
+  build:
     strategy:
       matrix:
         os: [ubuntu-latest, windows-latest, macOS-latest]
       fail-fast: false
 
     runs-on: ${{ matrix.os }}
 
     steps:
-    - uses: actions/checkout@v1
-    - uses: actions/setup-node@v1
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Setup Node.js
+      uses: actions/setup-node@v1
       with:
         node-version: '12.x'
-    - name: Get npm cache directory
+    - name: Determine npm cache directory
      id: npm-cache
      run: |
        echo "::set-output name=dir::$(npm config get cache)"
-    - uses: actions/cache@v1
+    - name: Restore npm cache
+      uses: actions/cache@v1
       with:
         path: ${{ steps.npm-cache.outputs.dir }}
         key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
         restore-keys: |
           ${{ runner.os }}-node-
 
     - run: npm ci
 
     - name: Prettier Format Check
       run: npm run format-check
 
     - name: ESLint Check
       run: npm run lint
 
     - name: Build & Test
       run: npm run test
 
+  # End to end save and restore
+  test-save:
+    strategy:
+      matrix:
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Generate files in working directory
+      shell: bash
+      run: __tests__/create-cache-files.sh ${{ runner.os }} test-cache
+    - name: Generate files outside working directory
+      shell: bash
+      run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
+    - name: Save cache
+      uses: ./
+      with:
+        key: test-${{ runner.os }}-${{ github.run_id }}
+        path: |
+          test-cache
+          ~/test-cache
+  test-restore:
+    needs: test-save
+    strategy:
+      matrix:
+        os: [ubuntu-latest, windows-latest, macOS-latest]
+      fail-fast: false
+    runs-on: ${{ matrix.os }}
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Restore cache
+      uses: ./
+      with:
+        key: test-${{ runner.os }}-${{ github.run_id }}
+        path: |
+          test-cache
+          ~/test-cache
+    - name: Verify cache files in working directory
+      shell: bash
+      run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache
+    - name: Verify cache files outside working directory
+      shell: bash
+      run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
+
+  # End to end with proxy
+  test-proxy-save:
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:latest
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+        - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Generate files
+      run: __tests__/create-cache-files.sh proxy test-cache
+    - name: Save cache
+      uses: ./
+      with:
+        key: test-proxy-${{ github.run_id }}
+        path: test-cache
+  test-proxy-restore:
+    needs: test-proxy-save
+    runs-on: ubuntu-latest
+    container:
+      image: ubuntu:latest
+      options: --dns 127.0.0.1
+    services:
+      squid-proxy:
+        image: datadog/squid:latest
+        ports:
+        - 3128:3128
+    env:
+      https_proxy: http://squid-proxy:3128
+    steps:
+    - name: Checkout
+      uses: actions/checkout@v2
+    - name: Restore cache
+      uses: ./
+      with:
+        key: test-proxy-${{ github.run_id }}
+        path: test-cache
+    - name: Verify cache
+      run: __tests__/verify-cache-files.sh proxy test-cache
.gitignore (vendored, 3 changed lines)
@@ -1,8 +1,5 @@
 __tests__/runner/*
 
-# comment out in distribution branches
-dist/
-
 node_modules/
 lib/
 
@@ -37,7 +37,7 @@ jobs:
   runs-on: ubuntu-latest
 
   steps:
-  - uses: actions/checkout@v1
+  - uses: actions/checkout@v2
 
   - name: Cache Primes
     id: cache-primes
@@ -67,7 +67,9 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
 - [Java - Gradle](./examples.md#java---gradle)
 - [Java - Maven](./examples.md#java---maven)
 - [Node - npm](./examples.md#node---npm)
+- [Node - Lerna](./examples.md#node---lerna)
 - [Node - Yarn](./examples.md#node---yarn)
+- [OCaml/Reason - esy](./examples.md##ocamlreason---esy)
 - [PHP - Composer](./examples.md#php---composer)
 - [Python - pip](./examples.md#python---pip)
 - [R - renv](./examples.md#r---renv)
@@ -80,7 +82,7 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
 
 ## Cache Limits
 
-A repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
+A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
 
 ## Skipping steps based on cache-hit
 
@@ -89,7 +91,7 @@ Using the `cache-hit` output, subsequent steps (such as install or build) can be
 Example:
 ```yaml
 steps:
-  - uses: actions/checkout@v1
+  - uses: actions/checkout@v2
 
   - uses: actions/cache@v1
     id: cache
@@ -1,4 +1,6 @@
 import * as core from "@actions/core";
+import * as io from "@actions/io";
+import { promises as fs } from "fs";
 import * as os from "os";
 import * as path from "path";
 
@@ -6,13 +8,24 @@ import { Events, Outputs, State } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import * as actionUtils from "../src/utils/actionUtils";
 
+import uuid = require("uuid");
+
 jest.mock("@actions/core");
 jest.mock("os");
 
+function getTempDir(): string {
+    return path.join(__dirname, "_temp", "actionUtils");
+}
+
 afterEach(() => {
     delete process.env[Events.Key];
 });
 
+afterAll(async () => {
+    delete process.env["GITHUB_WORKSPACE"];
+    await io.rmRF(getTempDir());
+});
+
 test("getArchiveFileSize returns file size", () => {
     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
 
@@ -181,17 +194,43 @@ test("isValidEvent returns false for unknown event", () => {
     expect(isValidEvent).toBe(false);
 });
 
-test("resolvePath with no ~ in path", () => {
-    const filePath = ".cache/yarn";
+test("resolvePaths with no ~ in path", async () => {
+    const filePath = ".cache";
 
-    const resolvedPath = actionUtils.resolvePath(filePath);
+    // Create the following layout:
+    // cwd
+    // cwd/.cache
+    // cwd/.cache/file.txt
 
-    const expectedPath = path.resolve(filePath);
-    expect(resolvedPath).toBe(expectedPath);
+    const root = path.join(getTempDir(), "no-tilde");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    const cache = path.join(root, ".cache");
+    await fs.mkdir(cache, { recursive: true });
+    await fs.writeFile(path.join(cache, "file.txt"), "cached");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [filePath];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
 });
 
-test("resolvePath with ~ in path", () => {
-    const filePath = "~/.cache/yarn";
+test("resolvePaths with ~ in path", async () => {
+    const cacheDir = uuid();
+    const filePath = `~/${cacheDir}`;
+    // Create the following layout:
+    // ~/uuid
+    // ~/uuid/file.txt
 
     const homedir = jest.requireActual("os").homedir();
     const homedirMock = jest.spyOn(os, "homedir");
@@ -199,24 +238,93 @@ test("resolvePath with ~ in path", () => {
         return homedir;
     });
 
-    const resolvedPath = actionUtils.resolvePath(filePath);
+    const target = path.join(homedir, cacheDir);
+    await fs.mkdir(target, { recursive: true });
+    await fs.writeFile(path.join(target, "file.txt"), "cached");
 
-    const expectedPath = path.join(homedir, ".cache/yarn");
-    expect(resolvedPath).toBe(expectedPath);
+    const root = getTempDir();
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    try {
+        const resolvedPath = await actionUtils.resolvePaths([filePath]);
+
+        const expectedPath = [path.relative(root, target)];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        await io.rmRF(target);
+    }
 });
 
-test("resolvePath with home not found", () => {
+test("resolvePaths with home not found", async () => {
     const filePath = "~/.cache/yarn";
     const homedirMock = jest.spyOn(os, "homedir");
     homedirMock.mockImplementation(() => {
         return "";
     });
 
-    expect(() => actionUtils.resolvePath(filePath)).toThrow(
-        "Unable to resolve `~` to HOME"
+    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
+        "Unable to determine HOME directory"
     );
 });
 
+test("resolvePaths inclusion pattern returns found", async () => {
+    const pattern = "*.ts";
+    // Create the following layout:
+    // inclusion-patterns
+    // inclusion-patterns/miss.txt
+    // inclusion-patterns/test.ts
+
+    const root = path.join(getTempDir(), "inclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths([pattern]);
+
+        const expectedPath = ["test.ts"];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
+test("resolvePaths exclusion pattern returns not found", async () => {
+    const patterns = ["*.ts", "!test.ts"];
+    // Create the following layout:
+    // exclusion-patterns
+    // exclusion-patterns/miss.txt
+    // exclusion-patterns/test.ts
+
+    const root = path.join(getTempDir(), "exclusion-patterns");
+    // tarball entries will be relative to workspace
+    process.env["GITHUB_WORKSPACE"] = root;
+
+    await fs.mkdir(root, { recursive: true });
+    await fs.writeFile(path.join(root, "miss.txt"), "no match");
+    await fs.writeFile(path.join(root, "test.ts"), "no match");
+
+    const originalCwd = process.cwd();
+
+    try {
+        process.chdir(root);
+
+        const resolvedPath = await actionUtils.resolvePaths(patterns);
+
+        const expectedPath = [];
+        expect(resolvedPath).toStrictEqual(expectedPath);
+    } finally {
+        process.chdir(originalCwd);
+    }
+});
+
 test("isValidEvent returns true for push event", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
@@ -234,3 +342,16 @@ test("isValidEvent returns true for pull request event", () => {
 
     expect(isValidEvent).toBe(true);
 });
+
+test("unlinkFile unlinks file", async () => {
+    const testDirectory = await fs.mkdtemp("unlinkFileTest");
+    const testFile = path.join(testDirectory, "test.txt");
+    await fs.writeFile(testFile, "hello world");
+
+    await actionUtils.unlinkFile(testFile);
+
+    // This should throw as testFile should not exist
+    await expect(fs.stat(testFile)).rejects.toThrow();
+
+    await fs.rmdir(testDirectory);
+});
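The `resolvePaths` tests above pin down the contract that replaced the old single-path `resolvePath`: patterns (including `~` and `!` exclusions) are expanded by a globber, and matches come back relative to `GITHUB_WORKSPACE` so that tar entries stay workspace-relative. A minimal sketch of a helper satisfying those tests; the use of `@actions/glob` is an assumption, since the diff shows only the tests, not the implementation:

```ts
import * as glob from "@actions/glob";
import * as path from "path";

// Sketch only: expand glob patterns and return workspace-relative matches,
// as the resolvePaths tests above expect.
export async function resolvePaths(patterns: string[]): Promise<string[]> {
    const paths: string[] = [];
    // The tests set GITHUB_WORKSPACE to the directory the tarball will be
    // created from, so every entry is made relative to it.
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    const globber = await glob.create(patterns.join("\n"));

    for await (const file of globber.globGenerator()) {
        // `!`-prefixed patterns remove earlier matches inside the globber,
        // which is what makes the exclusion test return an empty array.
        paths.push(path.relative(workspace, file));
    }

    return paths;
}
```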
__tests__/cacheHttpsClient.test.ts (new file, 177 lines)
@@ -0,0 +1,177 @@
+import { getCacheVersion, retry } from "../src/cacheHttpClient";
+import { CompressionMethod, Inputs } from "../src/constants";
+import * as testUtils from "../src/utils/testUtils";
+
+afterEach(() => {
+    testUtils.clearInputs();
+});
+
+test("getCacheVersion with path input and compression method undefined returns version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+
+    const result = getCacheVersion();
+
+    expect(result).toEqual(
+        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
+    );
+});
+
+test("getCacheVersion with zstd compression returns version", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+    const result = getCacheVersion(CompressionMethod.Zstd);
+
+    expect(result).toEqual(
+        "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
+    );
+});
+
+test("getCacheVersion with gzip compression does not change vesion", async () => {
+    testUtils.setInput(Inputs.Path, "node_modules");
+    const result = getCacheVersion(CompressionMethod.Gzip);
+
+    expect(result).toEqual(
+        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
+    );
+});
+
+test("getCacheVersion with no input throws", async () => {
+    expect(() => getCacheVersion()).toThrow();
+});
+
+interface TestResponse {
+    statusCode: number;
+    result: string | null;
+}
+
+function handleResponse(
+    response: TestResponse | undefined
+): Promise<TestResponse> {
+    if (!response) {
+        fail("Retry method called too many times");
+    }
+
+    if (response.statusCode === 999) {
+        throw Error("Test Error");
+    } else {
+        return Promise.resolve(response);
+    }
+}
+
+async function testRetryExpectingResult(
+    responses: Array<TestResponse>,
+    expectedResult: string | null
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    const actualResult = await retry(
+        "test",
+        () => handleResponse(responses.pop()),
+        (response: TestResponse) => response.statusCode
+    );
+
+    expect(actualResult.result).toEqual(expectedResult);
+}
+
+async function testRetryExpectingError(
+    responses: Array<TestResponse>
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    expect(
+        retry(
+            "test",
+            () => handleResponse(responses.pop()),
+            (response: TestResponse) => response.statusCode
+        )
+    ).rejects.toBeInstanceOf(Error);
+}
+
+test("retry works on successful response", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry works after retryable status code", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 503,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry fails after exhausting retries", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry fails after non-retryable status code", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 500,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry works after error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 999,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry returns after client error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 400,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        null
+    );
+});
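These tests fix the contract of the new `retry` helper that gives the branch its name: a success or a client error such as 400 is returned to the caller, a thrown error or a retryable server status such as 503 earns another attempt, and a non-retryable 500 or running out of attempts rejects. A minimal sketch consistent with those expectations; the two-attempt default and the retryable-status check are read off the tests above, not off the diffed implementation:

```ts
// Sketch only: generic retry wrapper shaped by the tests above.
// `getStatusCode` maps an arbitrary response type to an HTTP-ish status.
export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2 // "fails after exhausting retries" feeds 503, 503, 200
): Promise<T> {
    let errorMessage = "";

    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        let isRetryable = false;
        try {
            const response = await method();
            const statusCode = getStatusCode(response);

            // Successes and client errors (e.g. 400) are handed straight back.
            if (statusCode === undefined || statusCode < 500) {
                return response;
            }

            // Assumption: only explicitly retryable server codes repeat.
            isRetryable = statusCode === 503;
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            isRetryable = true; // "retry works after error"
            errorMessage = (error as Error).message;
        }

        if (!isRetryable) {
            break; // "retry fails after non-retryable status code" (500)
        }
    }

    throw Error(`${name} failed: ${errorMessage}`);
}
```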
__tests__/create-cache-files.sh (new executable file, 17 lines)
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+# Validate args
+prefix="$1"
+if [ -z "$prefix" ]; then
+    echo "Must supply prefix argument"
+    exit 1
+fi
+
+path="$2"
+if [ -z "$path" ]; then
+    echo "Must supply path argument"
+    exit 1
+fi
+
+mkdir -p $path
+echo "$prefix $GITHUB_RUN_ID" > $path/test-file.txt
@@ -1,7 +1,13 @@
 import * as core from "@actions/core";
 import * as path from "path";
+
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { Events, Inputs } from "../src/constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Inputs
+} from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";
 import * as tar from "../src/tar";
@@ -13,10 +19,6 @@ jest.mock("../src/tar");
 jest.mock("../src/utils/actionUtils");
 
 beforeAll(() => {
-    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
-        return path.resolve(filePath);
-    });
-
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -33,6 +35,11 @@ beforeAll(() => {
         const actualUtils = jest.requireActual("../src/utils/actionUtils");
         return actualUtils.getSupportedEvents();
     });
+
+    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getCacheFileName(cm);
+    });
 });
 
 beforeEach(() => {
@@ -59,7 +66,8 @@ test("restore with invalid event outputs warning", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     await run();
-    expect(failedMock).toHaveBeenCalledWith(
+    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
+    expect(failedMock).not.toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
 });
@@ -136,7 +144,7 @@ test("restore with no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);
 
     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}.`
+        `Cache not found for input keys: ${key}`
     );
 });
 
@@ -195,13 +203,12 @@ test("restore with restore keys and no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);
 
     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}, ${restoreKey}.`
+        `Cache not found for input keys: ${key}, ${restoreKey}`
     );
 });
 
-test("restore with cache found", async () => {
+test("restore with gzip compressed cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -230,7 +237,7 @@ test("restore with cache found", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Gzip);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -240,12 +247,20 @@ test("restore with cache found", async () => {
         .mockReturnValue(fileSize);
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
+    const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+
+    const compression = CompressionMethod.Gzip;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key]);
+    expect(getCacheMock).toHaveBeenCalledWith([key], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -255,18 +270,21 @@ test("restore with cache found", async () => {
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
+
+    expect(unlinkFileMock).toHaveBeenCalledTimes(1);
+    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
-test("restore with a pull request event and cache found", async () => {
+test("restore with a pull request event and zstd compressed cache found", async () => {
     const key = "node-test";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -297,7 +315,7 @@ test("restore with a pull request event and cache found", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Zstd);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -308,11 +326,17 @@ test("restore with a pull request event and cache found", async () => {
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key]);
+    expect(getCacheMock).toHaveBeenCalledWith([key], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -323,19 +347,19 @@ test("restore with a pull request event and cache found", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("restore with cache found for restore key", async () => {
     const key = "node-test";
     const restoreKey = "node-";
-    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key,
@@ -365,7 +389,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, "cache.tgz");
+    const archivePath = path.join(tempPath, CacheFilename.Zstd);
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -376,11 +400,17 @@ test("restore with cache found for restore key", async () => {
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
+    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
+        compressionMethod: compression
+    });
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -391,7 +421,7 @@ test("restore with cache found for restore key", async () => {
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
@@ -400,4 +430,5 @@ test("restore with cache found for restore key", async () => {
         `Cache restored from key: ${restoreKey}`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
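Taken together, the mock assertions above pin the restore pipeline's call order: look up an entry for the key list (now passing `{ compressionMethod }`), save it to state, download into a temp directory whose file name comes from `getCacheFileName(compressionMethod)`, extract with the compression method rather than a target path (tar entries are already workspace-relative), unlink the archive to free disk, and set the `cache-hit` output. A sketch of that flow; the lookup name `getCacheEntry`, the `archiveLocation` field, and the exact log text beyond what the tests assert are guesses, not confirmed by this diff:

```ts
import * as core from "@actions/core";
import * as path from "path";

import * as cacheHttpClient from "../src/cacheHttpClient";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";

// Sketch only: restore flow in the order the spies above observe it.
async function restoreSketch(keys: string[]): Promise<void> {
    const compressionMethod = await actionUtils.getCompressionMethod();

    // Hypothetical lookup name; the tests only see it through a mock.
    const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
        compressionMethod
    });
    if (!cacheEntry) {
        // The message no longer ends with a period, per the updated tests.
        core.info(`Cache not found for input keys: ${keys.join(", ")}`);
        return;
    }
    actionUtils.setCacheState(cacheEntry);

    const archivePath = path.join(
        await actionUtils.createTempDirectory(),
        actionUtils.getCacheFileName(compressionMethod) // CacheFilename.Gzip or .Zstd
    );
    await cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath);
    core.info(
        `Cache Size: ~${Math.round(
            actionUtils.getArchiveFileSize(archivePath) / (1024 * 1024)
        )} MB`
    );

    // extractTar now takes the compression method instead of a target path:
    // tarball entries are relative to the workspace.
    await tar.extractTar(archivePath, compressionMethod);
    await actionUtils.unlinkFile(archivePath); // reclaim disk space

    actionUtils.setCacheHitOutput(
        actionUtils.isExactKeyMatch(keys[0], cacheEntry)
    );
}
```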
@@ -1,7 +1,13 @@
 import * as core from "@actions/core";
 import * as path from "path";
+
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import { Events, Inputs } from "../src/constants";
+import {
+    CacheFilename,
+    CompressionMethod,
+    Events,
+    Inputs
+} from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
 import * as tar from "../src/tar";
@@ -40,13 +46,20 @@ beforeAll(() => {
         return actualUtils.getSupportedEvents();
     });
 
-    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
-        return path.resolve(filePath);
-    });
+    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
+        async filePaths => {
+            return filePaths.map(x => path.resolve(x));
+        }
+    );
 
     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
         return Promise.resolve("/foo/bar");
     });
+
+    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
+        const actualUtils = jest.requireActual("../src/utils/actionUtils");
+        return actualUtils.getCacheFileName(cm);
+    });
 });
 
 beforeEach(() => {
@@ -189,7 +202,7 @@ test("save with large cache outputs warning", async () => {
     });
 
     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);
 
     const createTarMock = jest.spyOn(tar, "createTar");
@@ -198,20 +211,27 @@ test("save with large cache outputs warning", async () => {
     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
         return cacheSize;
     });
+    const compression = CompressionMethod.Gzip;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
         "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with reserve cache failure outputs warning", async () => {
@@ -247,13 +267,18 @@ test("save with reserve cache failure outputs warning", async () => {
     });
 
     const createTarMock = jest.spyOn(tar, "createTar");
-
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });
 
     expect(infoMock).toHaveBeenCalledWith(
         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@@ -263,6 +288,7 @@ test("save with reserve cache failure outputs warning", async () => {
     expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(logWarningMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with server error outputs warning", async () => {
@@ -288,7 +314,7 @@ test("save with server error outputs warning", async () => {
     });
 
     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -305,24 +331,36 @@ test("save with server error outputs warning", async () => {
         .mockImplementationOnce(() => {
             throw new Error("HTTP Error Occurred");
         });
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });
 
-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
 
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with valid inputs uploads a cache", async () => {
@@ -347,7 +385,7 @@ test("save with valid inputs uploads a cache", async () => {
     });
 
     const inputPath = "node_modules";
-    const cachePath = path.resolve(inputPath);
+    const cachePaths = [path.resolve(inputPath)];
     testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -360,19 +398,31 @@ test("save with valid inputs uploads a cache", async () => {
     const createTarMock = jest.spyOn(tar, "createTar");
 
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
+    const compression = CompressionMethod.Zstd;
+    const getCompressionMock = jest
+        .spyOn(actionUtils, "getCompressionMethod")
+        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
+        compressionMethod: compression
+    });
 
-    const archivePath = path.join("/foo/bar", "cache.tgz");
+    const archiveFolder = "/foo/bar";
+    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
+    expect(createTarMock).toHaveBeenCalledWith(
+        archiveFolder,
+        cachePaths,
+        compression
+    );
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
+    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
@ -1,58 +1,204 @@
|
|||||||
import * as exec from "@actions/exec";
|
import * as exec from "@actions/exec";
|
||||||
import * as io from "@actions/io";
|
import * as io from "@actions/io";
|
||||||
|
import * as path from "path";
|
||||||
|
|
||||||
|
import { CacheFilename, CompressionMethod } from "../src/constants";
|
||||||
import * as tar from "../src/tar";
|
import * as tar from "../src/tar";
|
||||||
|
import * as utils from "../src/utils/actionUtils";
|
||||||
|
|
||||||
|
import fs = require("fs");
|
||||||
|
|
||||||
jest.mock("@actions/exec");
|
jest.mock("@actions/exec");
|
||||||
jest.mock("@actions/io");
|
jest.mock("@actions/io");
|
||||||
|
|
||||||
beforeAll(() => {
|
const IS_WINDOWS = process.platform === "win32";
|
||||||
|
|
||||||
|
function getTempDir(): string {
|
||||||
|
return path.join(__dirname, "_temp", "tar");
|
||||||
|
}
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
jest.spyOn(io, "which").mockImplementation(tool => {
|
jest.spyOn(io, "which").mockImplementation(tool => {
|
||||||
return Promise.resolve(tool);
|
return Promise.resolve(tool);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
process.env["GITHUB_WORKSPACE"] = process.cwd();
|
||||||
|
await jest.requireActual("@actions/io").rmRF(getTempDir());
|
||||||
});
|
});
|
||||||
|
|
||||||
test("extract tar", async () => {
|
afterAll(async () => {
|
||||||
|
delete process.env["GITHUB_WORKSPACE"];
|
||||||
|
await jest.requireActual("@actions/io").rmRF(getTempDir());
|
||||||
|
});
|
||||||
|
|
||||||
|
test("zstd extract tar", async () => {
|
||||||
const mkdirMock = jest.spyOn(io, "mkdirP");
|
const mkdirMock = jest.spyOn(io, "mkdirP");
|
||||||
const execMock = jest.spyOn(exec, "exec");
|
const execMock = jest.spyOn(exec, "exec");
|
||||||
|
|
||||||
const archivePath = "cache.tar";
|
const archivePath = IS_WINDOWS
|
||||||
const targetDirectory = "~/.npm/cache";
|
? `${process.env["windir"]}\\fakepath\\cache.tar`
|
||||||
await tar.extractTar(archivePath, targetDirectory);
|
: "cache.tar";
|
||||||
|
const workspace = process.env["GITHUB_WORKSPACE"];
|
||||||
|
|
||||||
expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
|
await tar.extractTar(archivePath, CompressionMethod.Zstd);
|
||||||
|
|
||||||
const IS_WINDOWS = process.platform === "win32";
|
expect(mkdirMock).toHaveBeenCalledWith(workspace);
|
||||||
const tarPath = IS_WINDOWS
|
const tarPath = IS_WINDOWS
|
||||||
? `${process.env["windir"]}\\System32\\tar.exe`
|
? `${process.env["windir"]}\\System32\\tar.exe`
|
||||||
: "tar";
|
: "tar";
|
||||||
expect(execMock).toHaveBeenCalledTimes(1);
|
expect(execMock).toHaveBeenCalledTimes(1);
|
||||||
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
|
expect(execMock).toHaveBeenCalledWith(
|
||||||
"-xz",
|
`"${tarPath}"`,
|
||||||
"-f",
|
[
|
||||||
archivePath,
|
"--use-compress-program",
|
||||||
"-C",
|
"zstd -d --long=30",
|
||||||
targetDirectory
|
"-xf",
|
||||||
]);
|
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
|
||||||
|
"-P",
|
||||||
|
"-C",
|
||||||
|
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
|
||||||
|
],
|
||||||
|
{ cwd: undefined }
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test("create tar", async () => {
|
test("gzip extract tar", async () => {
|
||||||
|
const mkdirMock = jest.spyOn(io, "mkdirP");
|
||||||
|
const execMock = jest.spyOn(exec, "exec");
|
||||||
|
const archivePath = IS_WINDOWS
|
||||||
|
? `${process.env["windir"]}\\fakepath\\cache.tar`
|
||||||
|
: "cache.tar";
|
||||||
|
const workspace = process.env["GITHUB_WORKSPACE"];
|
||||||
|
|
||||||
|
await tar.extractTar(archivePath, CompressionMethod.Gzip);
|
||||||
|
|
||||||
|
expect(mkdirMock).toHaveBeenCalledWith(workspace);
|
||||||
|
const tarPath = IS_WINDOWS
|
||||||
|
? `${process.env["windir"]}\\System32\\tar.exe`
|
||||||
|
: "tar";
|
||||||
|
expect(execMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(execMock).toHaveBeenCalledWith(
|
||||||
|
`"${tarPath}"`,
|
||||||
|
[
|
||||||
|
"-z",
|
||||||
|
"-xf",
|
||||||
|
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
|
||||||
|
"-P",
|
||||||
|
"-C",
|
||||||
|
IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
|
||||||
|
],
|
||||||
|
{ cwd: undefined }
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test("gzip extract GNU tar on windows", async () => {
|
||||||
|
if (IS_WINDOWS) {
|
||||||
|
jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
|
||||||
|
|
||||||
|
const isGnuMock = jest
|
||||||
|
.spyOn(utils, "useGnuTar")
|
||||||
|
.mockReturnValue(Promise.resolve(true));
|
||||||
|
const execMock = jest.spyOn(exec, "exec");
|
||||||
|
const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
|
||||||
|
const workspace = process.env["GITHUB_WORKSPACE"];
|
||||||
|
|
||||||
|
await tar.extractTar(archivePath, CompressionMethod.Gzip);
|
||||||
|
|
||||||
|
expect(isGnuMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(execMock).toHaveBeenCalledTimes(1);
|
||||||
|
expect(execMock).toHaveBeenCalledWith(
|
||||||
|
`"tar"`,
|
||||||
|
[
|
||||||
|
"-z",
|
||||||
|
"-xf",
|
||||||
|
archivePath.replace(/\\/g, "/"),
|
||||||
|
"-P",
|
||||||
|
"-C",
|
||||||
|
workspace?.replace(/\\/g, "/"),
|
||||||
|
"--force-local"
|
||||||
|
],
|
||||||
|
{ cwd: undefined }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
});
|

test("zstd create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Zstd
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "--use-compress-program",
            "zstd -T0 --long=30",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Zstd.replace(/\\/g, "/")
                : CacheFilename.Zstd,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});

test("gzip create tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archiveFolder = getTempDir();
    const workspace = process.env["GITHUB_WORKSPACE"];
    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];

    await fs.promises.mkdir(archiveFolder, { recursive: true });

    await tar.createTar(
        archiveFolder,
        sourceDirectories,
        CompressionMethod.Gzip
    );

    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

    expect(execMock).toHaveBeenCalledTimes(1);
    expect(execMock).toHaveBeenCalledWith(
        `"${tarPath}"`,
        [
            "-z",
            "-cf",
            IS_WINDOWS
                ? CacheFilename.Gzip.replace(/\\/g, "/")
                : CacheFilename.Gzip,
            "-P",
            "-C",
            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
            "--files-from",
            "manifest.txt"
        ],
        {
            cwd: archiveFolder
        }
    );
});
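
For orientation (not part of the diff), the invocations these tests pin down flatten to command lines like the following sketch; the workspace path is an illustrative stand-in for `GITHUB_WORKSPACE`, and the quoting of the compress program is for readability only (the action passes an argument array, not a shell string):

```typescript
// Illustrative only: flattened forms of the asserted tar invocations.
const workspace = "/home/runner/work/repo"; // stand-in for GITHUB_WORKSPACE

// zstd create path, run with cwd set to the archive folder:
const zstdCreate =
    `tar --use-compress-program "zstd -T0 --long=30" -cf cache.tzst ` +
    `-P -C ${workspace} --files-from manifest.txt`;

// gzip extract path:
const gzipExtract = `tar -z -xf cache.tar -P -C ${workspace}`;

console.log(zstdCreate + "\n" + gzipExtract);
```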
36  __tests__/verify-cache-files.sh  Executable file
@ -0,0 +1,36 @@
#!/bin/sh

# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
  echo "Must supply prefix argument"
  exit 1
fi

path="$2"
if [ -z "$path" ]; then
  echo "Must specify path argument"
  exit 1
fi

# Sanity check GITHUB_RUN_ID defined
if [ -z "$GITHUB_RUN_ID" ]; then
  echo "GITHUB_RUN_ID not defined"
  exit 1
fi

# Verify file exists
file="$path/test-file.txt"
echo "Checking for $file"
if [ ! -e $file ]; then
  echo "File does not exist"
  exit 1
fi

# Verify file content
content="$(cat $file)"
echo "File content:\n$content"
if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then
  echo "Unexpected file content"
  exit 1
fi
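Usage, as implied by the argument checks above: `sh __tests__/verify-cache-files.sh <prefix> <path>`. The script exits non-zero unless `<path>/test-file.txt` exists and contains the string `<prefix> $GITHUB_RUN_ID`.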
@ -1,4 +1,4 @@
name: 'Cache'
description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
5499  dist/restore/index.js  vendored  Normal file  (diff suppressed: too large)
5476  dist/save/index.js  vendored  Normal file  (diff suppressed: too large)
150  examples.md
@ -1,22 +1,34 @@
# Examples

- [Examples](#examples)
  - [C# - NuGet](#c---nuget)
  - [Elixir - Mix](#elixir---mix)
  - [Go - Modules](#go---modules)
  - [Haskell - Cabal](#haskell---cabal)
  - [Java - Gradle](#java---gradle)
  - [Java - Maven](#java---maven)
  - [Node - npm](#node---npm)
    - [macOS and Ubuntu](#macos-and-ubuntu)
    - [Windows](#windows)
    - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config)
  - [Node - Lerna](#node---lerna)
  - [Node - Yarn](#node---yarn)
  - [OCaml/Reason - esy](#ocamlreason---esy)
  - [PHP - Composer](#php---composer)
  - [Python - pip](#python---pip)
    - [Simple example](#simple-example)
    - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow)
    - [Using pip to get cache location](#using-pip-to-get-cache-location)
    - [Using a script to get cache location](#using-a-script-to-get-cache-location)
  - [R - renv](#r---renv)
    - [Simple example](#simple-example-1)
    - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow-1)
  - [Ruby - Bundler](#ruby---bundler)
  - [Rust - Cargo](#rust---cargo)
  - [Scala - SBT](#scala---sbt)
  - [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
  - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
  - [Swift - Swift Package Manager](#swift---swift-package-manager)
## C# - NuGet

Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):

@ -94,7 +106,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
- uses: actions/cache@v1
  with:
    path: ~/.gradle/caches
    key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }}
    restore-keys: |
      ${{ runner.os }}-gradle-
```
@ -130,17 +142,6 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
### Windows

```yaml
- name: Get npm cache directory
  id: npm-cache
  run: |
@ -153,25 +154,82 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o
      ${{ runner.os }}-node-
```

### Using multiple systems and `npm config`

```yaml
- name: Get npm cache directory
  id: npm-cache
  run: |
    echo "::set-output name=dir::$(npm config get cache)"
- uses: actions/cache@v1
  with:
    path: ${{ steps.npm-cache.outputs.dir }}
    key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
    restore-keys: |
      ${{ runner.os }}-node-
```

## Node - Lerna

> Note: this example uses the new multi-paths feature, which is only available from `master`.

```yaml
- name: restore lerna
  uses: actions/cache@master
  with:
    path: |
      node_modules
      */*/node_modules
    key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }}
```
## Node - Yarn

The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info.

```yaml
- name: Get yarn cache directory path
  id: yarn-cache-dir-path
  run: echo "::set-output name=dir::$(yarn cache dir)"

- uses: actions/cache@v1
  id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`)
  with:
    path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
    key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
    restore-keys: |
      ${{ runner.os }}-yarn-
```

## OCaml/Reason - esy

Esy allows you to export built dependencies and import pre-built dependencies.

```yaml
- name: Restore Cache
  id: restore-cache
  uses: actions/cache@v1
  with:
    path: _export
    key: ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }}
    restore-keys: |
      ${{ runner.os }}-esy-
- name: Esy install
  run: 'esy install'
- name: Import Cache
  run: |
    esy import-dependencies _export
    rm -rf _export

...(Build job)...

# Re-export dependencies if anything has changed or if it is the first time
- name: Setting dependency cache
  run: |
    esy export-dependencies
  if: steps.restore-cache.outputs.cache-hit != 'true'
```
## PHP - Composer

```yaml
- name: Get Composer Cache Directory
  id: composer-cache
  run: |
@ -233,14 +291,32 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
      ${{ runner.os }}-pip-
```

### Using pip to get cache location

> Note: This requires pip 20.1+
```yaml
- name: Get pip cache dir
  id: pip-cache
  run: |
    echo "::set-output name=dir::$(pip cache dir)"

- name: pip cache
  uses: actions/cache@v1
  with:
    path: ${{ steps.pip-cache.outputs.dir }}
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
    restore-keys: |
      ${{ runner.os }}-pip-
```
### Using a script to get cache location

> Note: This uses an internal pip API and may not always work
```yaml
- name: Get pip cache dir
  id: pip-cache
  run: |
    python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)"

- uses: actions/cache@v1
  with:
3943  package-lock.json  generated  (diff suppressed: too large)
11  package.json
@ -1,16 +1,15 @@
{
    "name": "cache",
    "version": "1.1.2",
    "private": true,
    "description": "Cache dependencies and build outputs",
    "main": "dist/restore/index.js",
    "scripts": {
        "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts",
        "test": "tsc --noEmit && jest --coverage",
        "lint": "eslint **/*.ts --cache",
        "format": "prettier --write **/*.ts",
        "format-check": "prettier --check **/*.ts"
    },
    "repository": {
        "type": "git",
@ -26,7 +25,8 @@
    "dependencies": {
        "@actions/core": "^1.2.0",
        "@actions/exec": "^1.0.1",
        "@actions/glob": "^0.1.0",
        "@actions/http-client": "^1.0.8",
        "@actions/io": "^1.0.1",
        "uuid": "^3.3.3"
    },
@ -43,6 +43,7 @@
        "eslint-plugin-import": "^2.18.2",
        "eslint-plugin-jest": "^23.0.3",
        "eslint-plugin-prettier": "^3.1.1",
        "eslint-plugin-simple-import-sort": "^5.0.2",
        "jest": "^24.8.0",
        "jest-circus": "^24.7.1",
        "nock": "^11.7.0",
src/cacheHttpClient.ts
@ -1,20 +1,28 @@
import * as core from "@actions/core";
import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import {
    IHttpClientResponse,
    IRequestOptions,
    ITypedResponse
} from "@actions/http-client/interfaces";
import * as crypto from "crypto";
import * as fs from "fs";
import * as stream from "stream";
import * as util from "util";

import { CompressionMethod, Inputs, SocketTimeout } from "./constants";
import {
    ArtifactCacheEntry,
    CacheOptions,
    CommitCacheRequest,
    ReserveCacheRequest,
    ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";

const versionSalt = "1.0";

function isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
@ -22,6 +30,13 @@ function isSuccessStatusCode(statusCode?: number): boolean {
    return statusCode >= 200 && statusCode < 300;
}

function isServerErrorStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return true;
    }
    return statusCode >= 500;
}

function isRetryableStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
@ -77,21 +92,106 @@ function createHttpClient(): HttpClient {
    );
}

export function getCacheVersion(compressionMethod?: CompressionMethod): string {
    const components = [core.getInput(Inputs.Path, { required: true })].concat(
        compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : []
    );

    // Add salt to cache version to support breaking changes in cache entry
    components.push(versionSalt);

    return crypto
        .createHash("sha256")
        .update(components.join("|"))
        .digest("hex");
}

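To make the versioning concrete, here is a minimal, self-contained sketch of the same computation; the component values are illustrative (the real code reads the `path` input via `core.getInput`):

```typescript
import * as crypto from "crypto";

// Assumed example inputs: a path input of "~/.npm/cache", zstd compression,
// and the "1.0" versionSalt defined above.
const components = ["~/.npm/cache", "zstd", "1.0"];
const version = crypto
    .createHash("sha256")
    .update(components.join("|"))
    .digest("hex");

// A gzip cache of the same path omits the compression component, so it
// hashes to a different version and cannot collide with the zstd entry.
console.log(version);
```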
export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let response: T | undefined = undefined;
    let statusCode: number | undefined = undefined;
    let isRetryable = false;
    let errorMessage = "";
    let attempt = 1;

    while (attempt <= maxAttempts) {
        try {
            response = await method();
            statusCode = getStatusCode(response);

            if (!isServerErrorStatusCode(statusCode)) {
                return response;
            }

            isRetryable = isRetryableStatusCode(statusCode);
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            isRetryable = true;
            errorMessage = error.message;
        }

        core.debug(
            `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
        );

        if (!isRetryable) {
            core.debug(`${name} - Error is not retryable`);
            break;
        }

        attempt++;
    }

    throw Error(`${name} failed: ${errorMessage}`);
}

export async function retryTypedResponse<T>(
    name: string,
    method: () => Promise<ITypedResponse<T>>,
    maxAttempts = 2
): Promise<ITypedResponse<T>> {
    return await retry(
        name,
        method,
        (response: ITypedResponse<T>) => response.statusCode,
        maxAttempts
    );
}

export async function retryHttpClientResponse<T>(
    name: string,
    method: () => Promise<IHttpClientResponse>,
    maxAttempts = 2
): Promise<IHttpClientResponse> {
    return await retry(
        name,
        method,
        (response: IHttpClientResponse) => response.message.statusCode,
        maxAttempts
    );
}

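As a quick illustration of the retry semantics (not part of the diff), a method that first returns a 503 and then a 200 succeeds on the second attempt, while a non-server-error response is returned immediately. This sketch uses the `retry` helper defined above together with a hand-rolled stub:

```typescript
// Assumed: 503 is in the retryable status code list (it is a 5xx either way,
// so the first attempt is treated as a failure by isServerErrorStatusCode).
let calls = 0;
const flaky = async (): Promise<{ statusCode: number }> => {
    calls++;
    return { statusCode: calls === 1 ? 503 : 200 };
};

retry("flaky", flaky, r => r.statusCode).then(r =>
    console.log(r.statusCode, "after", calls, "attempts") // 200 after 2 attempts
);
```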
export async function getCacheEntry(
    keys: string[],
    options?: CacheOptions
): Promise<ArtifactCacheEntry | null> {
    const httpClient = createHttpClient();
    const version = getCacheVersion(options?.compressionMethod);
    const resource = `cache?keys=${encodeURIComponent(
        keys.join(",")
    )}&version=${version}`;

    const response = await retryTypedResponse("getCacheEntry", () =>
        httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
    );

    if (response.statusCode === 204) {
        return null;
    }

    const cacheResult = response.result;
    const cacheDownloadUrl = cacheResult?.archiveLocation;
@ -107,13 +207,10 @@ export async function getCacheEntry(

async function pipeResponseToStream(
    response: IHttpClientResponse,
    output: NodeJS.WritableStream
): Promise<void> {
    const pipeline = util.promisify(stream.pipeline);
    await pipeline(response.message, output);
}

export async function downloadCache(
@ -122,21 +219,58 @@ export async function downloadCache(
): Promise<void> {
    const stream = fs.createWriteStream(archivePath);
    const httpClient = new HttpClient("actions/cache");
    const downloadResponse = await retryHttpClientResponse(
        "downloadCache",
        () => httpClient.get(archiveLocation)
    );

    // Abort download if no traffic received over the socket.
    downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
        downloadResponse.message.destroy();
        core.debug(
            `Aborting download, socket timed out after ${SocketTimeout} ms`
        );
    });

    await pipeResponseToStream(downloadResponse, stream);

    // Validate download size.
    const contentLengthHeader =
        downloadResponse.message.headers["content-length"];

    if (contentLengthHeader) {
        const expectedLength = parseInt(contentLengthHeader);
        const actualLength = utils.getArchiveFileSize(archivePath);

        if (actualLength != expectedLength) {
            throw new Error(
                `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
            );
        }
    } else {
        core.debug("Unable to validate download, no Content-Length header");
    }
}
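A standalone sketch of the same size check, with made-up values; the file name and header value are assumptions for the example:

```typescript
import * as fs from "fs";

// Illustrative: compare the server-advertised size with what landed on disk.
const expectedLength = parseInt("1048576"); // from the Content-Length header
const actualLength = fs.statSync("cache.tzst").size;

if (actualLength !== expectedLength) {
    throw new Error(
        `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
    );
}
```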

// Reserve Cache
export async function reserveCache(
    key: string,
    options?: CacheOptions
): Promise<number> {
    const httpClient = createHttpClient();
    const version = getCacheVersion(options?.compressionMethod);

    const reserveCacheRequest: ReserveCacheRequest = {
        key,
        version
    };
    const response = await retryTypedResponse("reserveCache", () =>
        httpClient.postJson<ReserveCacheResponse>(
            getCacheApiUrl("caches"),
            reserveCacheRequest
        )
    );

    return response?.result?.cacheId ?? -1;
}

@ -152,7 +286,7 @@ function getContentRange(start: number, end: number): string {
async function uploadChunk(
    httpClient: HttpClient,
    resourceUrl: string,
    openStream: () => NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<void> {
@ -173,28 +307,14 @@ async function uploadChunk(
        return await httpClient.sendStream(
            "PATCH",
            resourceUrl,
            openStream(),
            additionalHeaders
        );
    };

    await retryHttpClientResponse(
        `uploadChunk (start: ${start}, end: ${end})`,
        uploadChunkRequest
    );
}

@ -236,17 +356,17 @@ async function uploadFile(
        const start = offset;
        const end = offset + chunkSize - 1;
        offset += MAX_CHUNK_SIZE;

        await uploadChunk(
            httpClient,
            resourceUrl,
            () =>
                fs.createReadStream(archivePath, {
                    fd,
                    start,
                    end,
                    autoClose: false
                }),
            start,
            end
        );
@ -265,9 +385,11 @@ async function commitCache(
    filesize: number
): Promise<ITypedResponse<null>> {
    const commitCacheRequest: CommitCacheRequest = { size: filesize };
    return await retryTypedResponse("commitCache", () =>
        httpClient.postJson<null>(
            getCacheApiUrl(`caches/${cacheId.toString()}`),
            commitCacheRequest
        )
    );
}

src/constants.ts
@ -18,3 +18,18 @@ export enum Events {
    Push = "push",
    PullRequest = "pull_request"
}

export enum CacheFilename {
    Gzip = "cache.tgz",
    Zstd = "cache.tzst"
}

export enum CompressionMethod {
    Gzip = "gzip",
    Zstd = "zstd"
}

// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const SocketTimeout = 5000;
6  src/contracts.d.ts  vendored
@ -1,3 +1,5 @@
import { CompressionMethod } from "./constants";

export interface ArtifactCacheEntry {
    cacheKey?: string;
    scope?: string;
@ -17,3 +19,7 @@ export interface ReserveCacheRequest {
export interface ReserveCacheResponse {
    cacheId: number;
}

export interface CacheOptions {
    compressionMethod?: CompressionMethod;
}
src/restore.ts
@ -1,5 +1,6 @@
import * as core from "@actions/core";
import * as path from "path";

import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
@ -19,11 +20,6 @@ async function run(): Promise<void> {
        return;
    }

    const primaryKey = core.getInput(Inputs.Key, { required: true });
    core.saveState(State.CacheKey, primaryKey);

@ -58,38 +54,49 @@ async function run(): Promise<void> {
        }
    }

    const compressionMethod = await utils.getCompressionMethod();

    try {
        const cacheEntry = await cacheHttpClient.getCacheEntry(keys, {
            compressionMethod: compressionMethod
        });
        if (!cacheEntry?.archiveLocation) {
            core.info(`Cache not found for input keys: ${keys.join(", ")}`);
            return;
        }

        const archivePath = path.join(
            await utils.createTempDirectory(),
            utils.getCacheFileName(compressionMethod)
        );
        core.debug(`Archive Path: ${archivePath}`);

        // Store the cache result
        utils.setCacheState(cacheEntry);

        try {
            // Download the cache from the cache entry
            await cacheHttpClient.downloadCache(
                cacheEntry.archiveLocation,
                archivePath
            );

            const archiveFileSize = utils.getArchiveFileSize(archivePath);
            core.info(
                `Cache Size: ~${Math.round(
                    archiveFileSize / (1024 * 1024)
                )} MB (${archiveFileSize} B)`
            );

            await extractTar(archivePath, compressionMethod);
        } finally {
            // Try to delete the archive to save space
            try {
                await utils.unlinkFile(archivePath);
            } catch (error) {
                core.debug(`Failed to delete archive: ${error}`);
            }
        }

        const isExactKeyMatch = utils.isExactKeyMatch(
            primaryKey,
26  src/save.ts
@ -1,5 +1,6 @@
import * as core from "@actions/core";
import * as path from "path";

import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
@ -34,8 +35,12 @@ async function run(): Promise<void> {
        return;
    }

    const compressionMethod = await utils.getCompressionMethod();

    core.debug("Reserving Cache");
    const cacheId = await cacheHttpClient.reserveCache(primaryKey, {
        compressionMethod: compressionMethod
    });
    if (cacheId == -1) {
        core.info(
            `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@ -43,18 +48,25 @@ async function run(): Promise<void> {
        return;
    }
    core.debug(`Cache ID: ${cacheId}`);
    const cachePaths = await utils.resolvePaths(
        core
            .getInput(Inputs.Path, { required: true })
            .split("\n")
            .filter(x => x !== "")
    );

    core.debug("Cache Paths:");
    core.debug(`${JSON.stringify(cachePaths)}`);

    const archiveFolder = await utils.createTempDirectory();
    const archivePath = path.join(
        archiveFolder,
        utils.getCacheFileName(compressionMethod)
    );

    core.debug(`Archive Path: ${archivePath}`);

    await createTar(archiveFolder, cachePaths, compressionMethod);

    const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
    const archiveFileSize = utils.getArchiveFileSize(archivePath);
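The multi-path handling above is the core of the new feature: the `path` input is now split on newlines before being resolved. A small sketch of what that parsing does (the input text is illustrative):

```typescript
// Assumed example: a workflow `path:` input written as a multi-line YAML block.
const pathInput = "node_modules\n*/*/node_modules\n";

// Split into one pattern per line, dropping the trailing empty entry.
const patterns = pathInput.split("\n").filter(x => x !== "");
console.log(patterns); // ["node_modules", "*/*/node_modules"]
```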
74  src/tar.ts
@ -1,47 +1,87 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync, writeFileSync } from "fs";
import * as path from "path";

import { CompressionMethod } from "./constants";
import * as utils from "./utils/actionUtils";

async function getTarPath(args: string[]): Promise<string> {
    // Explicitly use BSD Tar on Windows
    const IS_WINDOWS = process.platform === "win32";
    if (IS_WINDOWS) {
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (existsSync(systemTar)) {
            return systemTar;
        } else if (await utils.useGnuTar()) {
            args.push("--force-local");
        }
    }
    return await io.which("tar", true);
}

async function execTar(args: string[], cwd?: string): Promise<void> {
    try {
        await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd });
    } catch (error) {
        throw new Error(`Tar failed with error: ${error?.message}`);
    }
}

function getWorkingDirectory(): string {
    return process.env["GITHUB_WORKSPACE"] ?? process.cwd();
}

export async function extractTar(
    archivePath: string,
    compressionMethod: CompressionMethod
): Promise<void> {
    // Create directory to extract tar into
    const workingDirectory = getWorkingDirectory();
    await io.mkdirP(workingDirectory);
    // -d: Decompress.
    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
    // Using 30 here because we also support 32-bit self-hosted runners.
    const args = [
        ...(compressionMethod == CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -d --long=30"]
            : ["-z"]),
        "-xf",
        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
    ];
    await execTar(args);
}

export async function createTar(
    archiveFolder: string,
    sourceDirectories: string[],
    compressionMethod: CompressionMethod
): Promise<void> {
    // Write source directories to manifest.txt to avoid command length limits
    const manifestFilename = "manifest.txt";
    const cacheFileName = utils.getCacheFileName(compressionMethod);
    writeFileSync(
        path.join(archiveFolder, manifestFilename),
        sourceDirectories.join("\n")
    );
    // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
    // Using 30 here because we also support 32-bit self-hosted runners.
    const workingDirectory = getWorkingDirectory();
    const args = [
        ...(compressionMethod == CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -T0 --long=30"]
            : ["-z"]),
        "-cf",
        cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "--files-from",
        manifestFilename
    ];
    await execTar(args, archiveFolder);
}
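To see what those argument arrays amount to, here is a small sketch that rebuilds the zstd create-side arguments outside the action; the paths are illustrative, and on POSIX runners `path.sep` is `/`, so the `replace` calls are no-ops there:

```typescript
import * as path from "path";

enum CompressionMethod { Gzip = "gzip", Zstd = "zstd" }

// Mirrors the argument construction above for the create path.
function createArgs(
    method: CompressionMethod,
    cacheFileName: string,
    workspace: string
): string[] {
    return [
        ...(method === CompressionMethod.Zstd
            ? ["--use-compress-program", "zstd -T0 --long=30"]
            : ["-z"]),
        "-cf",
        cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "-P",
        "-C",
        workspace.replace(new RegExp("\\" + path.sep, "g"), "/"),
        "--files-from",
        "manifest.txt"
    ];
}

console.log(
    createArgs(CompressionMethod.Zstd, "cache.tzst", "/home/runner/work/repo").join(" ")
);
// --use-compress-program zstd -T0 --long=30 -cf cache.tzst -P -C /home/runner/work/repo --files-from manifest.txt
```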
src/utils/actionUtils.ts
@ -1,11 +1,20 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as glob from "@actions/glob";
import * as io from "@actions/io";
import * as fs from "fs";
import * as os from "os";
import * as path from "path";
import * as util from "util";
import * as uuidV4 from "uuid/v4";

import {
    CacheFilename,
    CompressionMethod,
    Events,
    Outputs,
    State
} from "../constants";
import { ArtifactCacheEntry } from "../contracts";

// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23
@ -28,6 +37,7 @@ export async function createTempDirectory(): Promise<string> {
        }
        tempDirectory = path.join(baseLocation, "actions", "temp");
    }

    const dest = path.join(tempDirectory, uuidV4.default());
    await io.mkdirP(dest);
    return dest;
@ -82,16 +92,21 @@ export function logWarning(message: string): void {
    core.info(`${warningPrefix}${message}`);
}

export async function resolvePaths(patterns: string[]): Promise<string[]> {
    const paths: string[] = [];
    const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd();
    const globber = await glob.create(patterns.join("\n"), {
        implicitDescendants: false
    });

    for await (const file of globber.globGenerator()) {
        const relativeFile = path.relative(workspace, file);
        core.debug(`Matched: ${relativeFile}`);
        // Paths are made relative so the tar entries are all relative to the root of the workspace.
        paths.push(`${relativeFile}`);
    }

    return paths;
}

export function getSupportedEvents(): string[] {
@ -105,3 +120,53 @@ export function isValidEvent(): boolean {
    const githubEvent = process.env[Events.Key] || "";
    return getSupportedEvents().includes(githubEvent);
}

export function unlinkFile(path: fs.PathLike): Promise<void> {
    return util.promisify(fs.unlink)(path);
}

async function getVersion(app: string): Promise<string> {
    core.debug(`Checking ${app} --version`);
    let versionOutput = "";
    try {
        await exec.exec(`${app} --version`, [], {
            ignoreReturnCode: true,
            silent: true,
            listeners: {
                stdout: (data: Buffer): string =>
                    (versionOutput += data.toString()),
                stderr: (data: Buffer): string =>
                    (versionOutput += data.toString())
            }
        });
    } catch (err) {
        core.debug(err.message);
    }

    versionOutput = versionOutput.trim();
    core.debug(versionOutput);
    return versionOutput;
}

export async function getCompressionMethod(): Promise<CompressionMethod> {
    // Disabling zstd on Windows due to https://github.com/actions/cache/issues/301
    if (os.platform() === "win32") {
        return CompressionMethod.Gzip;
    }

    const versionOutput = await getVersion("zstd");
    return versionOutput.toLowerCase().includes("zstd command line interface")
        ? CompressionMethod.Zstd
        : CompressionMethod.Gzip;
}

export function getCacheFileName(compressionMethod: CompressionMethod): string {
    return compressionMethod == CompressionMethod.Zstd
        ? CacheFilename.Zstd
        : CacheFilename.Gzip;
}

export async function useGnuTar(): Promise<boolean> {
    const versionOutput = await getVersion("tar");
    return versionOutput.toLowerCase().includes("gnu tar");
}
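The capability probes above all reduce to parsing `--version` output. A hedged sketch of the same checks run directly; the matched substrings come from the code above, and actual output varies by tool version:

```typescript
import { execSync } from "child_process";

// Illustrative stand-in for getVersion(): capture `app --version` output.
function version(app: string): string {
    try {
        return execSync(`${app} --version`, { encoding: "utf8" }).trim();
    } catch {
        return ""; // tool missing -> falls back to gzip / non-GNU tar
    }
}

const isZstd = version("zstd").toLowerCase().includes("zstd command line interface");
const isGnuTar = version("tar").toLowerCase().includes("gnu tar");
console.log({ isZstd, isGnuTar });
```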