Mirror of https://github.com/actions/cache.git (synced 2025-06-24 19:31:10 +02:00)
Compare commits
46 Commits
joshmgross · v1
SHA1: f5ce41475b, 68fa0a8d81, 56ec64e417, efbc4e162b, d9747005de, 3f662ca624, 0232e3178d, ee7a57c615, da9f90cb83, ec7f7ebd08, 2a973a0f4e, cbbb8b4d4f, 5a0add1806, 9fe7ad8b07, 7c7d003bbb, 96e5a46c57, 84e606dfac, 70655ec832, 78a4b2143b, 4dc4b4e758, 85aee6a487, fe1055e9d1, fab26f3f4f, 4887979af8, f9c9166ecb, 23e301d35c, e43776276f, b6d538e2aa, 296374f6c9, 6c11532937, c33bff8d72, d1991bb4c5, 60e292adf7, a505c2e7a6, c262ac0154, 10a14413e7, cf4f44db70, 4c4974aff1, 1da52de10f, b45d91cc4b, cffae9552b, 44543250bd, 6491e51b66, 86dff562ab, 0f810ad45a, 9d8c7b4041
112  .github/workflows/workflow.yml  vendored
@@ -4,50 +4,130 @@ on:
  pull_request:
    branches:
      - master
      - releases/**
    paths-ignore:
      - '**.md'
  push:
    branches:
      - master
      - releases/**
    paths-ignore:
      - '**.md'

jobs:
-  test:
-    name: Test on ${{ matrix.os }}
+  # Build and unit test
+  build:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
-      - uses: actions/checkout@v1
-      - uses: actions/setup-node@v1
+      - name: Checkout
+        uses: actions/checkout@v2
+      - name: Setup Node.js
+        uses: actions/setup-node@v1
        with:
          node-version: '12.x'
-      - name: Get npm cache directory
+      - name: Determine npm cache directory
        id: npm-cache
        run: |
          echo "::set-output name=dir::$(npm config get cache)"
-      - uses: actions/cache@v1
+      - name: Restore npm cache
+        uses: actions/cache@v1
        with:
          path: ${{ steps.npm-cache.outputs.dir }}
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-
      - run: npm ci
      - name: Prettier Format Check
        run: npm run format-check
      - name: ESLint Check
        run: npm run lint
      - name: Build & Test
        run: npm run test

  # End to end save and restore
  test-save:
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Generate files
        shell: bash
        run: __tests__/create-cache-files.sh ${{ runner.os }}
      - name: Save cache
        uses: ./
        with:
          key: test-${{ runner.os }}-${{ github.run_id }}
          path: test-cache
  test-restore:
    needs: test-save
    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest, macOS-latest]
      fail-fast: false
    runs-on: ${{ matrix.os }}
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Restore cache
        uses: ./
        with:
          key: test-${{ runner.os }}-${{ github.run_id }}
          path: test-cache
      - name: Verify cache
        shell: bash
        run: __tests__/verify-cache-files.sh ${{ runner.os }}

  # End to end with proxy
  test-proxy-save:
    runs-on: ubuntu-latest
    container:
      image: ubuntu:latest
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: ubuntu/squid:latest
        ports:
          - 3128:3128
    env:
      https_proxy: http://squid-proxy:3128
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Generate files
        run: __tests__/create-cache-files.sh proxy
      - name: Save cache
        uses: ./
        with:
          key: test-proxy-${{ github.run_id }}
          path: test-cache
  test-proxy-restore:
    needs: test-proxy-save
    runs-on: ubuntu-latest
    container:
      image: ubuntu:latest
      options: --dns 127.0.0.1
    services:
      squid-proxy:
        image: ubuntu/squid:latest
        ports:
          - 3128:3128
    env:
      https_proxy: http://squid-proxy:3128
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Restore cache
        uses: ./
        with:
          key: test-proxy-${{ github.run_id }}
          path: test-cache
      - name: Verify cache
        run: __tests__/verify-cache-files.sh proxy
3  .gitignore  vendored

@@ -94,3 +94,6 @@ typings/

# DynamoDB Local files
.dynamodb/
+
+# Text editor files
+.vscode/
11  README.md

@@ -1,6 +1,6 @@
# cache

-This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
+This action allows caching dependencies and build outputs to improve workflow execution time.

<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>

@@ -63,21 +63,24 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
- [C# - Nuget](./examples.md#c---nuget)
- [Elixir - Mix](./examples.md#elixir---mix)
- [Go - Modules](./examples.md#go---modules)
+- [Haskell - Cabal](./examples.md#haskell---cabal)
- [Java - Gradle](./examples.md#java---gradle)
- [Java - Maven](./examples.md#java---maven)
- [Node - npm](./examples.md#node---npm)
- [Node - Yarn](./examples.md#node---yarn)
- [PHP - Composer](./examples.md#php---composer)
- [Python - pip](./examples.md#python---pip)
-- [Ruby - Gem](./examples.md#ruby---gem)
+- [R - renv](./examples.md#r---renv)
+- [Ruby - Bundler](./examples.md#ruby---bundler)
- [Rust - Cargo](./examples.md#rust---cargo)
- [Scala - SBT](./examples.md#scala---sbt)
- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
+- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)

## Cache Limits

-Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
+A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.

## Skipping steps based on cache-hit
144  __tests__/cacheHttpsClient.test.ts  Normal file

@@ -0,0 +1,144 @@
import { retry } from "../src/cacheHttpClient";
import * as testUtils from "../src/utils/testUtils";

afterEach(() => {
    testUtils.clearInputs();
});

interface TestResponse {
    statusCode: number;
    result: string | null;
}

function handleResponse(
    response: TestResponse | undefined
): Promise<TestResponse> {
    if (!response) {
        fail("Retry method called too many times");
    }

    if (response.statusCode === 999) {
        throw Error("Test Error");
    } else {
        return Promise.resolve(response);
    }
}

async function testRetryExpectingResult(
    responses: Array<TestResponse>,
    expectedResult: string | null
): Promise<void> {
    responses = responses.reverse(); // Reverse responses since we pop from end

    const actualResult = await retry(
        "test",
        () => handleResponse(responses.pop()),
        (response: TestResponse) => response.statusCode
    );

    expect(actualResult.result).toEqual(expectedResult);
}

async function testRetryExpectingError(
    responses: Array<TestResponse>
): Promise<void> {
    responses = responses.reverse(); // Reverse responses since we pop from end

    expect(
        retry(
            "test",
            () => handleResponse(responses.pop()),
            (response: TestResponse) => response.statusCode
        )
    ).rejects.toBeInstanceOf(Error);
}

test("retry works on successful response", async () => {
    await testRetryExpectingResult(
        [
            {
                statusCode: 200,
                result: "Ok"
            }
        ],
        "Ok"
    );
});

test("retry works after retryable status code", async () => {
    await testRetryExpectingResult(
        [
            {
                statusCode: 503,
                result: null
            },
            {
                statusCode: 200,
                result: "Ok"
            }
        ],
        "Ok"
    );
});

test("retry fails after exhausting retries", async () => {
    await testRetryExpectingError([
        {
            statusCode: 503,
            result: null
        },
        {
            statusCode: 503,
            result: null
        },
        {
            statusCode: 200,
            result: "Ok"
        }
    ]);
});

test("retry fails after non-retryable status code", async () => {
    await testRetryExpectingError([
        {
            statusCode: 500,
            result: null
        },
        {
            statusCode: 200,
            result: "Ok"
        }
    ]);
});

test("retry works after error", async () => {
    await testRetryExpectingResult(
        [
            {
                statusCode: 999,
                result: null
            },
            {
                statusCode: 200,
                result: "Ok"
            }
        ],
        "Ok"
    );
});

test("retry returns after client error", async () => {
    await testRetryExpectingResult(
        [
            {
                statusCode: 400,
                result: null
            },
            {
                statusCode: 200,
                result: "Ok"
            }
        ],
        null
    );
});
11  __tests__/create-cache-files.sh  Executable file

@@ -0,0 +1,11 @@
#!/bin/sh

# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
    echo "Must supply prefix argument"
    exit 1
fi

mkdir test-cache
echo "$prefix $GITHUB_RUN_ID" > test-cache/test-file.txt
@@ -248,7 +248,10 @@ test("restore with cache found", async () => {
    expect(getCacheMock).toHaveBeenCalledWith([key]);
    expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
+    expect(downloadCacheMock).toHaveBeenCalledWith(
+        cacheEntry.archiveLocation,
+        archivePath
+    );
    expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);

    expect(extractTarMock).toHaveBeenCalledTimes(1);

@@ -312,7 +315,10 @@ test("restore with a pull request event and cache found", async () => {
    expect(getCacheMock).toHaveBeenCalledWith([key]);
    expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
+    expect(downloadCacheMock).toHaveBeenCalledWith(
+        cacheEntry.archiveLocation,
+        archivePath
+    );
    expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
    expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);

@@ -377,7 +383,10 @@ test("restore with cache found for restore key", async () => {
    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
    expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
    expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
-    expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
+    expect(downloadCacheMock).toHaveBeenCalledWith(
+        cacheEntry.archiveLocation,
+        archivePath
+    );
    expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
    expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
@@ -194,7 +194,7 @@ test("save with large cache outputs warning", async () => {

    const createTarMock = jest.spyOn(tar, "createTar");

-    const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit
+    const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit
    jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
        return cacheSize;
    });

@@ -208,12 +208,63 @@ test("save with large cache outputs warning", async () => {

    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith(
-        "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache."
+        "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
    );

    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with reserve cache failure outputs warning", async () => {
    const infoMock = jest.spyOn(core, "info");
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

    const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
    const cacheEntry: ArtifactCacheEntry = {
        cacheKey: "Linux-node-",
        scope: "refs/heads/master",
        creationTime: "2019-11-13T19:18:02+00:00",
        archiveLocation: "www.actionscache.test/download"
    };

    jest.spyOn(core, "getState")
        // Cache Entry State
        .mockImplementationOnce(() => {
            return JSON.stringify(cacheEntry);
        })
        // Cache Key State
        .mockImplementationOnce(() => {
            return primaryKey;
        });

    const inputPath = "node_modules";
    testUtils.setInput(Inputs.Path, inputPath);

    const reserveCacheMock = jest
        .spyOn(cacheHttpClient, "reserveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(-1);
        });

    const createTarMock = jest.spyOn(tar, "createTar");

    const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");

    await run();

    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);

    expect(infoMock).toHaveBeenCalledWith(
        `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
    );

    expect(createTarMock).toHaveBeenCalledTimes(0);
    expect(saveCacheMock).toHaveBeenCalledTimes(0);
    expect(logWarningMock).toHaveBeenCalledTimes(0);
    expect(failedMock).toHaveBeenCalledTimes(0);
});

test("save with server error outputs warning", async () => {
    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
    const failedMock = jest.spyOn(core, "setFailed");

@@ -240,6 +291,13 @@ test("save with server error outputs warning", async () => {
    const cachePath = path.resolve(inputPath);
    testUtils.setInput(Inputs.Path, inputPath);

    const cacheId = 4;
    const reserveCacheMock = jest
        .spyOn(cacheHttpClient, "reserveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    const createTarMock = jest.spyOn(tar, "createTar");

    const saveCacheMock = jest

@@ -250,13 +308,16 @@ test("save with server error outputs warning", async () => {

    await run();

    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);

    const archivePath = path.join("/foo/bar", "cache.tgz");

    expect(createTarMock).toHaveBeenCalledTimes(1);
    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);

    expect(logWarningMock).toHaveBeenCalledTimes(1);
    expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");

@@ -289,18 +350,29 @@ test("save with valid inputs uploads a cache", async () => {
    const cachePath = path.resolve(inputPath);
    testUtils.setInput(Inputs.Path, inputPath);

    const cacheId = 4;
    const reserveCacheMock = jest
        .spyOn(cacheHttpClient, "reserveCache")
        .mockImplementationOnce(() => {
            return Promise.resolve(cacheId);
        });

    const createTarMock = jest.spyOn(tar, "createTar");

    const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");

    await run();

    expect(reserveCacheMock).toHaveBeenCalledTimes(1);
    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);

    const archivePath = path.join("/foo/bar", "cache.tgz");

    expect(createTarMock).toHaveBeenCalledTimes(1);
    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);

    expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);

    expect(failedMock).toHaveBeenCalledTimes(0);
});
@@ -2,6 +2,8 @@ import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as tar from "../src/tar";

+import fs = require("fs");

jest.mock("@actions/exec");
jest.mock("@actions/io");

@@ -11,17 +13,19 @@ beforeAll(() => {
    });
});

-test("extract tar", async () => {
+test("extract BSD tar", async () => {
    const mkdirMock = jest.spyOn(io, "mkdirP");
    const execMock = jest.spyOn(exec, "exec");

-    const archivePath = "cache.tar";
+    const IS_WINDOWS = process.platform === "win32";
+    const archivePath = IS_WINDOWS
+        ? `${process.env["windir"]}\\fakepath\\cache.tar`
+        : "cache.tar";
    const targetDirectory = "~/.npm/cache";
    await tar.extractTar(archivePath, targetDirectory);

    expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);

-    const IS_WINDOWS = process.platform === "win32";
    const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
        : "tar";

@@ -29,13 +33,37 @@ test("extract tar", async () => {
    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
        "-xz",
        "-f",
-        archivePath,
+        IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
        "-C",
-        targetDirectory
+        IS_WINDOWS ? targetDirectory?.replace(/\\/g, "/") : targetDirectory
    ]);
});

-test("create tar", async () => {
+test("extract GNU tar", async () => {
    const IS_WINDOWS = process.platform === "win32";
    if (IS_WINDOWS) {
        jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
        jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true));

        const execMock = jest.spyOn(exec, "exec");
        const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
        const targetDirectory = "~/.npm/cache";

        await tar.extractTar(archivePath, targetDirectory);

        expect(execMock).toHaveBeenCalledTimes(1);
        expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [
            "-xz",
            "-f",
            archivePath.replace(/\\/g, "/"),
            "-C",
            targetDirectory?.replace(/\\/g, "/"),
            "--force-local"
        ]);
    }
});

+test("create BSD tar", async () => {
    const execMock = jest.spyOn(exec, "exec");

    const archivePath = "cache.tar";

@@ -50,9 +78,9 @@ test("create tar", async () => {
    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
        "-cz",
        "-f",
-        archivePath,
+        IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
        "-C",
-        sourceDirectory,
+        IS_WINDOWS ? sourceDirectory?.replace(/\\/g, "/") : sourceDirectory,
        "."
    ]);
});
30  __tests__/verify-cache-files.sh  Executable file

@@ -0,0 +1,30 @@
#!/bin/sh

# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
    echo "Must supply prefix argument"
    exit 1
fi

# Sanity check GITHUB_RUN_ID defined
if [ -z "$GITHUB_RUN_ID" ]; then
    echo "GITHUB_RUN_ID not defined"
    exit 1
fi

# Verify file exists
file="test-cache/test-file.txt"
echo "Checking for $file"
if [ ! -e $file ]; then
    echo "File does not exist"
    exit 1
fi

# Verify file content
content="$(cat $file)"
echo "File content:\n$content"
if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then
    echo "Unexpected file content"
    exit 1
fi
@@ -1,5 +1,5 @@
name: 'Cache'
-description: 'Cache dependencies and build outputs to improve workflow execution time'
+description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time'
author: 'GitHub'
inputs:
  path:

@@ -21,4 +21,4 @@ runs:
  post-if: 'success()'
branding:
  icon: 'archive'
-  color: 'gray-dark'
+  color: 'gray-dark'
5337  dist/restore/index.js  vendored  Normal file
File diff suppressed because it is too large

5318  dist/save/index.js  vendored  Normal file
File diff suppressed because it is too large
109  examples.md

@@ -3,16 +3,20 @@
- [C# - NuGet](#c---nuget)
- [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules)
+- [Haskell - Cabal](#haskell---cabal)
- [Java - Gradle](#java---gradle)
- [Java - Maven](#java---maven)
- [Node - npm](#node---npm)
- [Node - Yarn](#node---yarn)
- [PHP - Composer](#php---composer)
- [Python - pip](#python---pip)
-- [Ruby - Gem](#ruby---gem)
+- [R - renv](#r---renv)
+- [Ruby - Bundler](#ruby---bundler)
- [Rust - Cargo](#rust---cargo)
- [Scala - SBT](#scala---sbt)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
+- [Swift - Swift Package Manager](#swift---swift-package-manager)

## C# - NuGet
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):

@@ -62,6 +66,28 @@ steps:
      ${{ runner.os }}-go-
```

+## Haskell - Cabal
+
+We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
+
+```yaml
+- uses: actions/cache@v1
+  name: Cache ~/.cabal/packages
+  with:
+    path: ~/.cabal/packages
+    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
+- uses: actions/cache@v1
+  name: Cache ~/.cabal/store
+  with:
+    path: ~/.cabal/store
+    key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
+- uses: actions/cache@v1
+  name: Cache dist-newstyle
+  with:
+    path: dist-newstyle
+    key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
+```

## Java - Gradle

```yaml

@@ -224,15 +250,64 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
      ${{ runner.os }}-pip-
```

-## Ruby - Gem
+## R - renv
+
+For renv, the cache directory will vary by OS. Look at https://rstudio.github.io/renv/articles/renv.html#cache
+
+Locations:
+- Ubuntu: `~/.local/share/renv`
+- macOS: `~/Library/Application Support/renv`
+- Windows: `%LOCALAPPDATA%/renv`
+
+### Simple example
+```yaml
+- uses: actions/cache@v1
+  with:
+    path: ~/.local/share/renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+```
+
+Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
+
+### Multiple OS's in a workflow
+
+```yaml
+- uses: actions/cache@v1
+  if: startsWith(runner.os, 'Linux')
+  with:
+    path: ~/.local/share/renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+
+- uses: actions/cache@v1
+  if: startsWith(runner.os, 'macOS')
+  with:
+    path: ~/Library/Application Support/renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+
+- uses: actions/cache@v1
+  if: startsWith(runner.os, 'Windows')
+  with:
+    path: ~\AppData\Local\renv
+    key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
+    restore-keys: |
+      ${{ runner.os }}-renv-
+```
+
+## Ruby - Bundler

```yaml
- uses: actions/cache@v1
  with:
    path: vendor/bundle
-    key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }}
+    key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }}
    restore-keys: |
-      ${{ runner.os }}-gem-
+      ${{ runner.os }}-gems-
```
When dependencies are installed later in the workflow, we must specify the same path for the bundler.

@@ -263,6 +338,21 @@ When dependencies are installed later in the workflow, we must specify the same
    key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
```

+## Scala - SBT
+
+```yaml
+- name: Cache SBT ivy cache
+  uses: actions/cache@v1
+  with:
+    path: ~/.ivy2/cache
+    key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
+- name: Cache SBT
+  uses: actions/cache@v1
+  with:
+    path: ~/.sbt
+    key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
+```

## Swift, Objective-C - Carthage

```yaml

@@ -284,3 +374,14 @@ When dependencies are installed later in the workflow, we must specify the same
    restore-keys: |
      ${{ runner.os }}-pods-
```

+## Swift - Swift Package Manager
+
+```yaml
+- uses: actions/cache@v1
+  with:
+    path: .build
+    key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
+    restore-keys: |
+      ${{ runner.os }}-spm-
+```
67  package-lock.json  generated

@@ -1,19 +1,46 @@
{
  "name": "cache",
-  "version": "1.0.3",
+  "version": "1.2.0",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
    "@actions/core": {
-      "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.0.tgz",
-      "integrity": "sha512-ZKdyhlSlyz38S6YFfPnyNgCDZuAF2T0Qv5eHflNWytPS8Qjvz39bZFMry9Bb/dpSnqWcNeav5yM2CTYpJeY+Dw=="
+      "version": "1.10.0",
+      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz",
+      "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==",
+      "requires": {
+        "@actions/http-client": "^2.0.1",
+        "uuid": "^8.3.2"
+      },
+      "dependencies": {
+        "@actions/http-client": {
+          "version": "2.1.0",
+          "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz",
+          "integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==",
+          "requires": {
+            "tunnel": "^0.0.6"
+          }
+        },
+        "uuid": {
+          "version": "8.3.2",
+          "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+          "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
+        }
+      }
    },
    "@actions/exec": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.1.tgz",
      "integrity": "sha512-nvFkxwiicvpzNiCBF4wFBDfnBvi7xp/as7LE1hBxBxKG2L29+gkIPBiLKMVORL+Hg3JNf07AKRfl0V5djoypjQ=="
    },
+    "@actions/http-client": {
+      "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.6.tgz",
+      "integrity": "sha512-LGmio4w98UyGX33b/W6V6Nx/sQHRXZ859YlMkn36wPsXPB82u8xTVlA/Dq2DXrm6lEq9RVmisRJa1c+HETAIJA==",
+      "requires": {
+        "tunnel": "0.0.6"
+      }
+    },
    "@actions/io": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.1.tgz",

@@ -2854,9 +2881,9 @@
      "dev": true
    },
    "handlebars": {
-      "version": "4.5.1",
-      "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.1.tgz",
-      "integrity": "sha512-C29UoFzHe9yM61lOsIlCE5/mQVGrnIOrOq7maQl76L7tYPCgC1og0Ajt6uWnX4ZTxBPnjw+CUvawphwCfJgUnA==",
+      "version": "4.5.3",
+      "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.3.tgz",
+      "integrity": "sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA==",
      "dev": true,
      "requires": {
        "neo-async": "^2.6.0",

@@ -5933,9 +5960,9 @@
      }
    },
    "tunnel": {
-      "version": "0.0.4",
-      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz",
-      "integrity": "sha1-LTeFoVjBdMmhbcLARuxfxfF0IhM="
+      "version": "0.0.6",
+      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg=="
    },
    "tunnel-agent": {
      "version": "0.6.0",

@@ -5973,15 +6000,6 @@
      "integrity": "sha512-DWkS49EQKVX//Tbupb9TFa19c7+MK1XmzkrZUR8TAktmE/DizXoaoJV6TZ/tSIPXipqNiRI6CyAe7x69Jb6RSw==",
      "dev": true
    },
-    "typed-rest-client": {
-      "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.5.0.tgz",
-      "integrity": "sha512-DVZRlmsfnTjp6ZJaatcdyvvwYwbWvR4YDNFDqb+qdTxpvaVP99YCpBkA8rxsLtAPjBVoDe4fNsnMIdZTiPuKWg==",
-      "requires": {
-        "tunnel": "0.0.4",
-        "underscore": "1.8.3"
-      }
-    },
    "typescript": {
      "version": "3.7.3",
      "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz",

@@ -5989,9 +6007,9 @@
      "dev": true
    },
    "uglify-js": {
-      "version": "3.6.7",
-      "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.7.tgz",
-      "integrity": "sha512-4sXQDzmdnoXiO+xvmTzQsfIiwrjUCSA95rSP4SEd8tDb51W2TiDOlL76Hl+Kw0Ie42PSItCW8/t6pBNCF2R48A==",
+      "version": "3.7.3",
+      "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.7.3.tgz",
+      "integrity": "sha512-7tINm46/3puUA4hCkKYo4Xdts+JDaVC9ZPRcG8Xw9R4nhO/gZgUM3TENq8IF4Vatk8qCig4MzP/c8G4u2BkVQg==",
      "dev": true,
      "optional": true,
      "requires": {

@@ -5999,11 +6017,6 @@
        "source-map": "~0.6.1"
      }
    },
-    "underscore": {
-      "version": "1.8.3",
-      "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
-      "integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI="
-    },
    "union-value": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz",
@@ -1,6 +1,6 @@
{
  "name": "cache",
-  "version": "1.0.3",
+  "version": "1.2.0",
  "private": true,
  "description": "Cache dependencies and build outputs",
  "main": "dist/restore/index.js",

@@ -24,10 +24,10 @@
  "author": "GitHub",
  "license": "MIT",
  "dependencies": {
-    "@actions/core": "^1.2.0",
+    "@actions/core": "^1.10.0",
    "@actions/exec": "^1.0.1",
+    "@actions/http-client": "^1.0.6",
    "@actions/io": "^1.0.1",
-    "typed-rest-client": "^1.5.0",
    "uuid": "^3.3.3"
  },
  "devDependencies": {
@@ -1,26 +1,66 @@
import * as core from "@actions/core";
+import { HttpClient, HttpCodes } from "@actions/http-client";
+import { BearerCredentialHandler } from "@actions/http-client/auth";
+import {
+    IHttpClientResponse,
+    IRequestOptions,
+    ITypedResponse
+} from "@actions/http-client/interfaces";
import * as fs from "fs";
-import { BearerCredentialHandler } from "typed-rest-client/Handlers";
-import { HttpClient } from "typed-rest-client/HttpClient";
-import { IHttpClientResponse } from "typed-rest-client/Interfaces";
-import { IRequestOptions, RestClient } from "typed-rest-client/RestClient";
-import { ArtifactCacheEntry } from "./contracts";
+import * as stream from "stream";
+import * as util from "util";
+
+import { SocketTimeout } from "./constants";
+import {
+    ArtifactCacheEntry,
+    CommitCacheRequest,
+    ReserveCacheRequest,
+    ReserveCacheResponse
+} from "./contracts";
+import * as utils from "./utils/actionUtils";

function isSuccessStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    return statusCode >= 200 && statusCode < 300;
}

function isServerErrorStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return true;
    }
    return statusCode >= 500;
}

function isRetryableStatusCode(statusCode?: number): boolean {
    if (!statusCode) {
        return false;
    }
    const retryableStatusCodes = [
        HttpCodes.BadGateway,
        HttpCodes.ServiceUnavailable,
        HttpCodes.GatewayTimeout
    ];
    return retryableStatusCodes.includes(statusCode);
}

-function getCacheUrl(): string {
+function getCacheApiUrl(resource: string): string {
    // Ideally we just use ACTIONS_CACHE_URL
-    const cacheUrl: string = (
+    const baseUrl: string = (
        process.env["ACTIONS_CACHE_URL"] ||
        process.env["ACTIONS_RUNTIME_URL"] ||
        ""
    ).replace("pipelines", "artifactcache");
-    if (!cacheUrl) {
+    if (!baseUrl) {
        throw new Error(
            "Cache Service Url not found, unable to restore cache."
        );
    }

-    core.debug(`Cache Url: ${cacheUrl}`);
-    return cacheUrl;
+    const url = `${baseUrl}_apis/artifactcache/${resource}`;
+    core.debug(`Resource Url: ${url}`);
+    return url;
}
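getCacheApiUrl derives the cache endpoint from the runner-provided URL by swapping the `pipelines` host segment for `artifactcache` and appending the `_apis/artifactcache/` resource path. A worked example with a made-up runtime URL (real values come from the runner environment):

```typescript
// Illustrative only: mirrors getCacheApiUrl's string rewrite with an
// invented ACTIONS_RUNTIME_URL value.
const runtimeUrl = "https://pipelines.example.test/Abc123/";
const baseUrl = runtimeUrl.replace("pipelines", "artifactcache");
const url = `${baseUrl}_apis/artifactcache/caches`;
// -> "https://artifactcache.example.test/Abc123/_apis/artifactcache/caches"
```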
@@ -29,42 +69,116 @@ function createAcceptHeader(type: string, apiVersion: string): string {

function getRequestOptions(): IRequestOptions {
    const requestOptions: IRequestOptions = {
-        acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
+        headers: {
+            Accept: createAcceptHeader("application/json", "6.0-preview.1")
+        }
    };

    return requestOptions;
}

-export async function getCacheEntry(
-    keys: string[]
-): Promise<ArtifactCacheEntry | null> {
-    const cacheUrl = getCacheUrl();
+function createHttpClient(): HttpClient {
    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
    const bearerCredentialHandler = new BearerCredentialHandler(token);

-    const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(
-        keys.join(",")
-    )}`;
-
-    const restClient = new RestClient("actions/cache", cacheUrl, [
-        bearerCredentialHandler
-    ]);
-
-    const response = await restClient.get<ArtifactCacheEntry>(
-        resource,
+    return new HttpClient(
+        "actions/cache",
+        [bearerCredentialHandler],
        getRequestOptions()
    );
}

export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let response: T | undefined = undefined;
    let statusCode: number | undefined = undefined;
    let isRetryable = false;
    let errorMessage = "";
    let attempt = 1;

    while (attempt <= maxAttempts) {
        try {
            response = await method();
            statusCode = getStatusCode(response);

            if (!isServerErrorStatusCode(statusCode)) {
                return response;
            }

            isRetryable = isRetryableStatusCode(statusCode);
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            isRetryable = true;
            errorMessage = error.message;
        }

        core.debug(
            `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
        );

        if (!isRetryable) {
            core.debug(`${name} - Error is not retryable`);
            break;
        }

        attempt++;
    }

    throw Error(`${name} failed: ${errorMessage}`);
}

export async function retryTypedResponse<T>(
    name: string,
    method: () => Promise<ITypedResponse<T>>,
    maxAttempts = 2
): Promise<ITypedResponse<T>> {
    return await retry(
        name,
        method,
        (response: ITypedResponse<T>) => response.statusCode,
        maxAttempts
    );
}

export async function retryHttpClientResponse<T>(
    name: string,
    method: () => Promise<IHttpClientResponse>,
    maxAttempts = 2
): Promise<IHttpClientResponse> {
    return await retry(
        name,
        method,
        (response: IHttpClientResponse) => response.message.statusCode,
        maxAttempts
    );
}
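retry is generic over the response type: the caller supplies a request thunk and a function that extracts a status code, and the helper retries server errors (retryable 5xx codes or thrown exceptions) up to maxAttempts before rejecting. A minimal sketch of the contract, using an invented response shape (FakeResponse and makeFlakyCall are not in the source):

```typescript
// Illustrative only: a fake two-call sequence against the retry helper.
interface FakeResponse {
    statusCode: number;
    body: string;
}

function makeFlakyCall(): () => Promise<FakeResponse> {
    let calls = 0;
    return async () => {
        calls++;
        // First attempt: retryable 503. Second attempt: success.
        return calls === 1
            ? { statusCode: 503, body: "" }
            : { statusCode: 200, body: "ok" };
    };
}

async function demo(): Promise<void> {
    const response = await retry(
        "demo", // name used in debug logs
        makeFlakyCall(), // request thunk
        (r: FakeResponse) => r.statusCode // status-code extractor
    );
    // Resolves with the 200 response on attempt 2 (maxAttempts defaults
    // to 2); a non-retryable code such as 400 is returned to the caller,
    // and exhausting attempts rejects with Error("demo failed: ...").
    console.log(response.body); // "ok"
}
```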
export async function getCacheEntry(
    keys: string[]
): Promise<ArtifactCacheEntry | null> {
    const httpClient = createHttpClient();
    const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`;

    const response = await retryTypedResponse("getCacheEntry", () =>
        httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource))
    );
    if (response.statusCode === 204) {
        return null;
    }
-    if (response.statusCode !== 200) {
+    if (!isSuccessStatusCode(response.statusCode)) {
        throw new Error(`Cache service responded with ${response.statusCode}`);
    }

    const cacheResult = response.result;
-    if (!cacheResult || !cacheResult.archiveLocation) {
+    const cacheDownloadUrl = cacheResult?.archiveLocation;
+    if (!cacheDownloadUrl) {
        throw new Error("Cache not found.");
    }
-    core.setSecret(cacheResult.archiveLocation);
+    core.setSecret(cacheDownloadUrl);
    core.debug(`Cache Result:`);
    core.debug(JSON.stringify(cacheResult));

@@ -73,56 +187,210 @@ export async function getCacheEntry(

async function pipeResponseToStream(
    response: IHttpClientResponse,
-    stream: NodeJS.WritableStream
+    output: NodeJS.WritableStream
): Promise<void> {
-    return new Promise(resolve => {
-        response.message.pipe(stream).on("close", () => {
-            resolve();
-        });
-    });
+    const pipeline = util.promisify(stream.pipeline);
+    await pipeline(response.message, output);
}

export async function downloadCache(
-    cacheEntry: ArtifactCacheEntry,
+    archiveLocation: string,
    archivePath: string
): Promise<void> {
    const stream = fs.createWriteStream(archivePath);
    const httpClient = new HttpClient("actions/cache");
-    // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
-    const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!);
+    const downloadResponse = await retryHttpClientResponse(
+        "downloadCache",
+        () => httpClient.get(archiveLocation)
+    );
+
+    // Abort download if no traffic received over the socket.
+    downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
+        downloadResponse.message.destroy();
+        core.debug(
+            `Aborting download, socket timed out after ${SocketTimeout} ms`
+        );
+    });

    await pipeResponseToStream(downloadResponse, stream);

+    // Validate download size.
+    const contentLengthHeader =
+        downloadResponse.message.headers["content-length"];
+
+    if (contentLengthHeader) {
+        const expectedLength = parseInt(contentLengthHeader);
+        const actualLength = utils.getArchiveFileSize(archivePath);
+
+        if (actualLength != expectedLength) {
+            throw new Error(
+                `Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
+            );
+        }
+    } else {
+        core.debug("Unable to validate download, no Content-Length header");
+    }
}

// Reserve Cache
export async function reserveCache(key: string): Promise<number> {
    const httpClient = createHttpClient();

    const reserveCacheRequest: ReserveCacheRequest = {
        key
    };
    const response = await retryTypedResponse("reserveCache", () =>
        httpClient.postJson<ReserveCacheResponse>(
            getCacheApiUrl("caches"),
            reserveCacheRequest
        )
    );
    return response?.result?.cacheId ?? -1;
}

function getContentRange(start: number, end: number): string {
    // Format: `bytes start-end/filesize`
    // start and end are inclusive
    // filesize can be *
    // For a 200 byte chunk starting at byte 0:
    // Content-Range: bytes 0-199/*
    return `bytes ${start}-${end}/*`;
}

async function uploadChunk(
    httpClient: HttpClient,
    resourceUrl: string,
    openStream: () => NodeJS.ReadableStream,
    start: number,
    end: number
): Promise<void> {
    core.debug(
        `Uploading chunk of size ${end -
            start +
            1} bytes at offset ${start} with content range: ${getContentRange(
            start,
            end
        )}`
    );
    const additionalHeaders = {
        "Content-Type": "application/octet-stream",
        "Content-Range": getContentRange(start, end)
    };

    await retryHttpClientResponse(
        `uploadChunk (start: ${start}, end: ${end})`,
        () =>
            httpClient.sendStream(
                "PATCH",
                resourceUrl,
                openStream(),
                additionalHeaders
            )
    );
}

function parseEnvNumber(key: string): number | undefined {
    const value = Number(process.env[key]);
    if (Number.isNaN(value) || value < 0) {
        return undefined;
    }
    return value;
}

async function uploadFile(
    httpClient: HttpClient,
    cacheId: number,
    archivePath: string
): Promise<void> {
    // Upload Chunks
    const fileSize = fs.statSync(archivePath).size;
    const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
    const fd = fs.openSync(archivePath, "r");

    const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
    const MAX_CHUNK_SIZE =
        parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
    core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);

    const parallelUploads = [...new Array(concurrency).keys()];
    core.debug("Awaiting all uploads");
    let offset = 0;

    try {
        await Promise.all(
            parallelUploads.map(async () => {
                while (offset < fileSize) {
                    const chunkSize = Math.min(
                        fileSize - offset,
                        MAX_CHUNK_SIZE
                    );
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    offset += MAX_CHUNK_SIZE;

                    await uploadChunk(
                        httpClient,
                        resourceUrl,
                        () =>
                            fs
                                .createReadStream(archivePath, {
                                    fd,
                                    start,
                                    end,
                                    autoClose: false
                                })
                                .on("error", error => {
                                    throw new Error(
                                        `Cache upload failed because file read failed with ${error.Message}`
                                    );
                                }),
                        start,
                        end
                    );
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
    return;
}
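uploadFile fans the archive out across `concurrency` workers that share a single `offset` cursor: each worker claims the next `MAX_CHUNK_SIZE` slice until the file is exhausted, and every slice is sent as an inclusive byte range. A standalone sketch of just the slicing arithmetic (the `chunkRanges` helper below is illustrative, not part of the source):

```typescript
// Illustrative helper (not in the source): the inclusive [start, end]
// pairs that uploadFile's workers collectively claim for one file.
function chunkRanges(
    fileSize: number,
    chunkSize: number
): Array<[number, number]> {
    const ranges: Array<[number, number]> = [];
    let offset = 0;
    while (offset < fileSize) {
        const size = Math.min(fileSize - offset, chunkSize);
        ranges.push([offset, offset + size - 1]); // end is inclusive
        offset += chunkSize;
    }
    return ranges;
}

// A 70 MB file with 32 MB chunks yields three slices, the last one short:
//   [0, 33554431], [33554432, 67108863], [67108864, 73400319]
// which map to Content-Range headers "bytes 0-33554431/*", and so on.
chunkRanges(70 * 1024 * 1024, 32 * 1024 * 1024);
```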
async function commitCache(
    httpClient: HttpClient,
    cacheId: number,
    filesize: number
): Promise<ITypedResponse<null>> {
    const commitCacheRequest: CommitCacheRequest = { size: filesize };
    return await retryTypedResponse("commitCache", () =>
        httpClient.postJson<null>(
            getCacheApiUrl(`caches/${cacheId.toString()}`),
            commitCacheRequest
        )
    );
}

export async function saveCache(
-    key: string,
+    cacheId: number,
    archivePath: string
): Promise<void> {
-    const stream = fs.createReadStream(archivePath);
+    const httpClient = createHttpClient();

-    const cacheUrl = getCacheUrl();
-    const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
-    const bearerCredentialHandler = new BearerCredentialHandler(token);
+    core.debug("Upload cache");
+    await uploadFile(httpClient, cacheId, archivePath);

-    const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
-    const postUrl = cacheUrl + resource;
-
-    const restClient = new RestClient("actions/cache", undefined, [
-        bearerCredentialHandler
-    ]);
-
-    const requestOptions = getRequestOptions();
-    requestOptions.additionalHeaders = {
-        "Content-Type": "application/octet-stream"
-    };
-
-    const response = await restClient.uploadStream<void>(
-        "POST",
-        postUrl,
-        stream,
-        requestOptions
-    );
+    // Commit Cache
+    core.debug("Commiting cache");
+    const cacheSize = utils.getArchiveFileSize(archivePath);
+    const commitCacheResponse = await commitCache(
+        httpClient,
+        cacheId,
+        cacheSize
+    );
-    if (response.statusCode !== 200) {
-        throw new Error(`Cache service responded with ${response.statusCode}`);
+    if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
+        throw new Error(
+            `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
+        );
    }

    core.info("Cache saved successfully");
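Taken together, saving is a three-step protocol against the cache service: reserve a cacheId for the key, PATCH the archive up in Content-Range chunks, then POST the total size to commit; saveCache wraps the last two steps. A condensed sketch of a caller driving it (the key and archive path below are invented for illustration):

```typescript
// Hypothetical driver for the save protocol above; key and archivePath
// are made-up values, and error handling is omitted.
import * as cacheHttpClient from "./cacheHttpClient";

async function saveExample(): Promise<void> {
    const key = "Linux-node-abc123"; // illustrative key
    const archivePath = "/tmp/cache.tgz"; // illustrative path

    // Step 1: reserve. -1 means another job already holds the reservation.
    const cacheId = await cacheHttpClient.reserveCache(key);
    if (cacheId === -1) {
        return; // skip saving; a concurrent job is creating this cache
    }

    // Steps 2 and 3: chunked upload, then commit; both happen inside saveCache.
    await cacheHttpClient.saveCache(cacheId, archivePath);
}
```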
@@ -18,3 +18,8 @@ export enum Events {
    Push = "push",
    PullRequest = "pull_request"
}
+
+// Socket timeout in milliseconds during download. If no traffic is received
+// over the socket during this period, the socket is destroyed and the download
+// is aborted.
+export const SocketTimeout = 5000;
13  src/contracts.d.ts  vendored

@@ -4,3 +4,16 @@ export interface ArtifactCacheEntry {
    creationTime?: string;
    archiveLocation?: string;
}
+
+export interface CommitCacheRequest {
+    size: number;
+}
+
+export interface ReserveCacheRequest {
+    key: string;
+    version?: string;
+}
+
+export interface ReserveCacheResponse {
+    cacheId: number;
+}
@@ -60,7 +60,7 @@ async function run(): Promise<void> {

    try {
        const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
-        if (!cacheEntry) {
+        if (!cacheEntry?.archiveLocation) {
            core.info(
                `Cache not found for input keys: ${keys.join(", ")}.`
            );

@@ -77,7 +77,10 @@ async function run(): Promise<void> {
        utils.setCacheState(cacheEntry);

        // Download the cache from the cache entry
-        await cacheHttpClient.downloadCache(cacheEntry, archivePath);
+        await cacheHttpClient.downloadCache(
+            cacheEntry.archiveLocation,
+            archivePath
+        );

        const archiveFileSize = utils.getArchiveFileSize(archivePath);
        core.info(
16  src/save.ts

@@ -34,6 +34,15 @@ async function run(): Promise<void> {
            return;
        }

+        core.debug("Reserving Cache");
+        const cacheId = await cacheHttpClient.reserveCache(primaryKey);
+        if (cacheId == -1) {
+            core.info(
+                `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
+            );
+            return;
+        }
+        core.debug(`Cache ID: ${cacheId}`);
        const cachePath = utils.resolvePath(
            core.getInput(Inputs.Path, { required: true })
        );

@@ -47,19 +56,20 @@ async function run(): Promise<void> {

        await createTar(archivePath, cachePath);

-        const fileSizeLimit = 400 * 1024 * 1024; // 400MB
+        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
        const archiveFileSize = utils.getArchiveFileSize(archivePath);
        core.debug(`File Size: ${archiveFileSize}`);
        if (archiveFileSize > fileSizeLimit) {
            utils.logWarning(
                `Cache size of ~${Math.round(
                    archiveFileSize / (1024 * 1024)
-                )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`
+                )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
            );
            return;
        }

-        await cacheHttpClient.saveCache(primaryKey, archivePath);
+        core.debug(`Saving Cache (ID: ${cacheId})`);
+        await cacheHttpClient.saveCache(cacheId, archivePath);
    } catch (error) {
        utils.logWarning(error.message);
    }
49  src/tar.ts

@@ -1,14 +1,36 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync } from "fs";
+import * as path from "path";
+import * as tar from "./tar";

-async function getTarPath(): Promise<string> {
+export async function isGnuTar(): Promise<boolean> {
+    core.debug("Checking tar --version");
+    let versionOutput = "";
+    await exec("tar --version", [], {
+        ignoreReturnCode: true,
+        silent: true,
+        listeners: {
+            stdout: (data: Buffer): string =>
+                (versionOutput += data.toString()),
+            stderr: (data: Buffer): string => (versionOutput += data.toString())
+        }
+    });
+
+    core.debug(versionOutput.trim());
+    return versionOutput.toUpperCase().includes("GNU TAR");
+}
+
+async function getTarPath(args: string[]): Promise<string> {
    // Explicitly use BSD Tar on Windows
    const IS_WINDOWS = process.platform === "win32";
    if (IS_WINDOWS) {
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (existsSync(systemTar)) {
            return systemTar;
+        } else if (await tar.isGnuTar()) {
+            args.push("--force-local");
        }
    }
    return await io.which("tar", true);

@@ -16,14 +38,8 @@

async function execTar(args: string[]): Promise<void> {
    try {
-        await exec(`"${await getTarPath()}"`, args);
+        await exec(`"${await getTarPath(args)}"`, args);
    } catch (error) {
-        const IS_WINDOWS = process.platform === "win32";
-        if (IS_WINDOWS) {
-            throw new Error(
-                `Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.`
-            );
-        }
        throw new Error(`Tar failed with error: ${error?.message}`);
    }
}

@@ -34,7 +50,13 @@ export async function extractTar(
): Promise<void> {
    // Create directory to extract tar into
    await io.mkdirP(targetDirectory);
-    const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
+    const args = [
+        "-xz",
+        "-f",
+        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "-C",
+        targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
+    ];
    await execTar(args);
}

@@ -42,6 +64,13 @@ export async function createTar(
    archivePath: string,
    sourceDirectory: string
): Promise<void> {
-    const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
+    const args = [
+        "-cz",
+        "-f",
+        archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "-C",
+        sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
+        "."
+    ];
    await execTar(args);
}
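Both extractTar and createTar rewrite the platform path separator to forward slashes before invoking tar; without this, GNU tar on Windows would treat a `C:` drive prefix as a remote host name (which is also why `--force-local` is pushed onto the args above). A small sketch of that rewrite in isolation (the `toTarPath` name is illustrative, not part of the source):

```typescript
import * as path from "path";

// Illustrative helper (not in the source): the same separator rewrite
// that the args arrays above apply inline.
function toTarPath(p: string): string {
    // On Windows path.sep is "\\", so the pattern becomes /\\/g;
    // on POSIX it is "/" and the replace leaves the string unchanged.
    return p.replace(new RegExp("\\" + path.sep, "g"), "/");
}

// On Windows: toTarPath("C:\\Users\\runner\\cache.tar")
//   -> "C:/Users/runner/cache.tar"
```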