Compare commits


1 Commit

Author SHA1 Message Date
7527073910 Test sudo tar 2019-12-27 11:48:00 -05:00
21 changed files with 6084 additions and 7053 deletions


@@ -4,130 +4,50 @@ on:
pull_request: pull_request:
branches: branches:
- master - master
- releases/**
paths-ignore: paths-ignore:
- '**.md' - '**.md'
push: push:
branches: branches:
- master - master
- releases/**
paths-ignore: paths-ignore:
- '**.md' - '**.md'
jobs: jobs:
# Build and unit test test:
build: name: Test on ${{ matrix.os }}
strategy: strategy:
matrix: matrix:
os: [ubuntu-latest, windows-latest, macOS-latest] os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:
- name: Checkout - uses: actions/checkout@v1
uses: actions/checkout@v2
- name: Setup Node.js - uses: actions/setup-node@v1
uses: actions/setup-node@v1
with: with:
node-version: '12.x' node-version: '12.x'
- name: Determine npm cache directory
- name: Get npm cache directory
id: npm-cache id: npm-cache
run: | run: |
echo "::set-output name=dir::$(npm config get cache)" echo "::set-output name=dir::$(npm config get cache)"
- name: Restore npm cache
uses: actions/cache@v1 - uses: actions/cache@v1
with: with:
path: ${{ steps.npm-cache.outputs.dir }} path: ${{ steps.npm-cache.outputs.dir }}
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: | restore-keys: |
${{ runner.os }}-node- ${{ runner.os }}-node-
- run: npm ci - run: npm ci
- name: Prettier Format Check - name: Prettier Format Check
run: npm run format-check run: npm run format-check
- name: ESLint Check - name: ESLint Check
run: npm run lint run: npm run lint
- name: Build & Test - name: Build & Test
run: npm run test run: npm run test
# End to end save and restore
test-save:
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Generate files
shell: bash
run: __tests__/create-cache-files.sh ${{ runner.os }}
- name: Save cache
uses: ./
with:
key: test-${{ runner.os }}-${{ github.run_id }}
path: test-cache
test-restore:
needs: test-save
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Restore cache
uses: ./
with:
key: test-${{ runner.os }}-${{ github.run_id }}
path: test-cache
- name: Verify cache
shell: bash
run: __tests__/verify-cache-files.sh ${{ runner.os }}
# End to end with proxy
test-proxy-save:
runs-on: ubuntu-latest
container:
image: ubuntu:latest
options: --dns 127.0.0.1
services:
squid-proxy:
image: ubuntu/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Generate files
run: __tests__/create-cache-files.sh proxy
- name: Save cache
uses: ./
with:
key: test-proxy-${{ github.run_id }}
path: test-cache
test-proxy-restore:
needs: test-proxy-save
runs-on: ubuntu-latest
container:
image: ubuntu:latest
options: --dns 127.0.0.1
services:
squid-proxy:
image: ubuntu/squid:latest
ports:
- 3128:3128
env:
https_proxy: http://squid-proxy:3128
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Restore cache
uses: ./
with:
key: test-proxy-${{ github.run_id }}
path: test-cache
- name: Verify cache
run: __tests__/verify-cache-files.sh proxy

.gitignore vendored (3 lines changed)

@@ -94,6 +94,3 @@ typings/
# DynamoDB Local files # DynamoDB Local files
.dynamodb/ .dynamodb/
# Text editor files
.vscode/


@@ -1,6 +1,6 @@
# cache # cache
This action allows caching dependencies and build outputs to improve workflow execution time. This GitHub Action allows caching dependencies and build outputs to improve workflow execution time.
<a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a> <a href="https://github.com/actions/cache/actions?query=workflow%3ATests"><img alt="GitHub Actions status" src="https://github.com/actions/cache/workflows/Tests/badge.svg?branch=master&event=push"></a>
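For orientation: the usage pattern this README documents is a single step taking `path`, `key`, and optional `restore-keys`. A minimal sketch against the v1 inputs shown in this diff, reusing the npm lockfile key that appears in the examples below:

```yaml
steps:
  - uses: actions/checkout@v1
  # Cache node_modules, keyed on the lockfile; fall back to any cache for this OS.
  - uses: actions/cache@v1
    with:
      path: node_modules
      key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
      restore-keys: |
        ${{ runner.os }}-node-
```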
@@ -63,24 +63,21 @@ See [Examples](examples.md) for a list of `actions/cache` implementations for us
- [C# - Nuget](./examples.md#c---nuget) - [C# - Nuget](./examples.md#c---nuget)
- [Elixir - Mix](./examples.md#elixir---mix) - [Elixir - Mix](./examples.md#elixir---mix)
- [Go - Modules](./examples.md#go---modules) - [Go - Modules](./examples.md#go---modules)
- [Haskell - Cabal](./examples.md#haskell---cabal)
- [Java - Gradle](./examples.md#java---gradle) - [Java - Gradle](./examples.md#java---gradle)
- [Java - Maven](./examples.md#java---maven) - [Java - Maven](./examples.md#java---maven)
- [Node - npm](./examples.md#node---npm) - [Node - npm](./examples.md#node---npm)
- [Node - Yarn](./examples.md#node---yarn) - [Node - Yarn](./examples.md#node---yarn)
- [PHP - Composer](./examples.md#php---composer) - [PHP - Composer](./examples.md#php---composer)
- [Python - pip](./examples.md#python---pip) - [Python - pip](./examples.md#python---pip)
- [R - renv](./examples.md#r---renv) - [Ruby - Gem](./examples.md#ruby---gem)
- [Ruby - Bundler](./examples.md#ruby---bundler)
- [Rust - Cargo](./examples.md#rust---cargo) - [Rust - Cargo](./examples.md#rust---cargo)
- [Scala - SBT](./examples.md#scala---sbt)
- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage) - [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods) - [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](./examples.md#swift---swift-package-manager)
## Cache Limits ## Cache Limits
A repository can have up to 5GB of caches. Once the 5GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted. Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted.
## Skipping steps based on cache-hit ## Skipping steps based on cache-hit
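The body of that section falls outside this hunk; the pattern it describes relies on the action's `cache-hit` output, roughly as in this sketch (the step `id` and install command are illustrative assumptions):

```yaml
steps:
  - uses: actions/cache@v1
    id: cache
    with:
      path: node_modules
      key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
  # Reinstall only when the cache missed.
  - name: Install dependencies
    if: steps.cache.outputs.cache-hit != 'true'
    run: npm ci
```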


@@ -1,144 +0,0 @@
import { retry } from "../src/cacheHttpClient";
import * as testUtils from "../src/utils/testUtils";
afterEach(() => {
testUtils.clearInputs();
});
interface TestResponse {
statusCode: number;
result: string | null;
}
function handleResponse(
response: TestResponse | undefined
): Promise<TestResponse> {
if (!response) {
fail("Retry method called too many times");
}
if (response.statusCode === 999) {
throw Error("Test Error");
} else {
return Promise.resolve(response);
}
}
async function testRetryExpectingResult(
responses: Array<TestResponse>,
expectedResult: string | null
): Promise<void> {
responses = responses.reverse(); // Reverse responses since we pop from end
const actualResult = await retry(
"test",
() => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
);
expect(actualResult.result).toEqual(expectedResult);
}
async function testRetryExpectingError(
responses: Array<TestResponse>
): Promise<void> {
responses = responses.reverse(); // Reverse responses since we pop from end
expect(
retry(
"test",
() => handleResponse(responses.pop()),
(response: TestResponse) => response.statusCode
)
).rejects.toBeInstanceOf(Error);
}
test("retry works on successful response", async () => {
await testRetryExpectingResult(
[
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry works after retryable status code", async () => {
await testRetryExpectingResult(
[
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry fails after exhausting retries", async () => {
await testRetryExpectingError([
{
statusCode: 503,
result: null
},
{
statusCode: 503,
result: null
},
{
statusCode: 200,
result: "Ok"
}
]);
});
test("retry fails after non-retryable status code", async () => {
await testRetryExpectingError([
{
statusCode: 500,
result: null
},
{
statusCode: 200,
result: "Ok"
}
]);
});
test("retry works after error", async () => {
await testRetryExpectingResult(
[
{
statusCode: 999,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
"Ok"
);
});
test("retry returns after client error", async () => {
await testRetryExpectingResult(
[
{
statusCode: 400,
result: null
},
{
statusCode: 200,
result: "Ok"
}
],
null
);
});


@@ -1,11 +0,0 @@
#!/bin/sh
# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
echo "Must supply prefix argument"
exit 1
fi
mkdir test-cache
echo "$prefix $GITHUB_RUN_ID" > test-cache/test-file.txt


@@ -248,10 +248,7 @@ test("restore with cache found", async () => {
expect(getCacheMock).toHaveBeenCalledWith([key]); expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith( expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(extractTarMock).toHaveBeenCalledTimes(1); expect(extractTarMock).toHaveBeenCalledTimes(1);
@@ -315,10 +312,7 @@ test("restore with a pull request event and cache found", async () => {
expect(getCacheMock).toHaveBeenCalledWith([key]); expect(getCacheMock).toHaveBeenCalledWith([key]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith( expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
@@ -383,10 +377,7 @@ test("restore with cache found for restore key", async () => {
expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]); expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
expect(downloadCacheMock).toHaveBeenCalledWith( expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath);
cacheEntry.archiveLocation,
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);


@@ -194,7 +194,7 @@ test("save with large cache outputs warning", async () => {
const createTarMock = jest.spyOn(tar, "createTar"); const createTarMock = jest.spyOn(tar, "createTar");
const cacheSize = 6 * 1024 * 1024 * 1024; //~6GB, over the 5GB limit const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
return cacheSize; return cacheSize;
}); });
@@ -208,63 +208,12 @@ test("save with large cache outputs warning", async () => {
expect(logWarningMock).toHaveBeenCalledTimes(1); expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith( expect(logWarningMock).toHaveBeenCalledWith(
"Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache." "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache."
); );
expect(failedMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0);
}); });
test("save with reserve cache failure outputs warning", async () => {
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(-1);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
expect(infoMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with server error outputs warning", async () => { test("save with server error outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed"); const failedMock = jest.spyOn(core, "setFailed");
@@ -291,13 +240,6 @@ test("save with server error outputs warning", async () => {
const cachePath = path.resolve(inputPath); const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath); testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar"); const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest const saveCacheMock = jest
@@ -308,16 +250,13 @@ test("save with server error outputs warning", async () => {
await run(); await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
const archivePath = path.join("/foo/bar", "cache.tgz"); const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1); expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(saveCacheMock).toHaveBeenCalledTimes(1); expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
expect(logWarningMock).toHaveBeenCalledTimes(1); expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
@@ -350,29 +289,18 @@ test("save with valid inputs uploads a cache", async () => {
const cachePath = path.resolve(inputPath); const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath); testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar"); const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
await run(); await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
const archivePath = path.join("/foo/bar", "cache.tgz"); const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1); expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
expect(saveCacheMock).toHaveBeenCalledTimes(1); expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath);
expect(failedMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0);
}); });


@@ -2,8 +2,6 @@ import * as exec from "@actions/exec";
import * as io from "@actions/io"; import * as io from "@actions/io";
import * as tar from "../src/tar"; import * as tar from "../src/tar";
import fs = require("fs");
jest.mock("@actions/exec"); jest.mock("@actions/exec");
jest.mock("@actions/io"); jest.mock("@actions/io");
@@ -13,19 +11,17 @@ beforeAll(() => {
}); });
}); });
test("extract BSD tar", async () => { test("extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP"); const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec"); const execMock = jest.spyOn(exec, "exec");
const IS_WINDOWS = process.platform === "win32"; const archivePath = "cache.tar";
const archivePath = IS_WINDOWS
? `${process.env["windir"]}\\fakepath\\cache.tar`
: "cache.tar";
const targetDirectory = "~/.npm/cache"; const targetDirectory = "~/.npm/cache";
await tar.extractTar(archivePath, targetDirectory); await tar.extractTar(archivePath, targetDirectory);
expect(mkdirMock).toHaveBeenCalledWith(targetDirectory); expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
const IS_WINDOWS = process.platform === "win32";
const tarPath = IS_WINDOWS const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe` ? `${process.env["windir"]}\\System32\\tar.exe`
: "tar"; : "tar";
@@ -33,37 +29,13 @@ test("extract BSD tar", async () => {
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-xz", "-xz",
"-f", "-f",
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, archivePath,
"-C", "-C",
IS_WINDOWS ? targetDirectory?.replace(/\\/g, "/") : targetDirectory targetDirectory
]); ]);
}); });
test("extract GNU tar", async () => { test("create tar", async () => {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true));
const execMock = jest.spyOn(exec, "exec");
const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
const targetDirectory = "~/.npm/cache";
await tar.extractTar(archivePath, targetDirectory);
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [
"-xz",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
targetDirectory?.replace(/\\/g, "/"),
"--force-local"
]);
}
});
test("create BSD tar", async () => {
const execMock = jest.spyOn(exec, "exec"); const execMock = jest.spyOn(exec, "exec");
const archivePath = "cache.tar"; const archivePath = "cache.tar";
@@ -78,9 +50,9 @@ test("create BSD tar", async () => {
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-cz", "-cz",
"-f", "-f",
IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, archivePath,
"-C", "-C",
IS_WINDOWS ? sourceDirectory?.replace(/\\/g, "/") : sourceDirectory, sourceDirectory,
"." "."
]); ]);
}); });


@@ -1,30 +0,0 @@
#!/bin/sh
# Validate args
prefix="$1"
if [ -z "$prefix" ]; then
echo "Must supply prefix argument"
exit 1
fi
# Sanity check GITHUB_RUN_ID defined
if [ -z "$GITHUB_RUN_ID" ]; then
echo "GITHUB_RUN_ID not defined"
exit 1
fi
# Verify file exists
file="test-cache/test-file.txt"
echo "Checking for $file"
if [ ! -e $file ]; then
echo "File does not exist"
exit 1
fi
# Verify file content
content="$(cat $file)"
echo "File content:\n$content"
if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then
echo "Unexpected file content"
exit 1
fi


@@ -1,5 +1,5 @@
name: 'Cache' name: 'Cache'
description: 'Cache artifacts like dependencies and build outputs to improve workflow execution time' description: 'Cache dependencies and build outputs to improve workflow execution time'
author: 'GitHub' author: 'GitHub'
inputs: inputs:
path: path:
@@ -21,4 +21,4 @@ runs:
post-if: 'success()' post-if: 'success()'
branding: branding:
icon: 'archive' icon: 'archive'
color: 'gray-dark' color: 'gray-dark'

dist/restore/index.js vendored (6005 lines changed)

File diff suppressed because it is too large.

dist/save/index.js vendored (6014 lines changed)

File diff suppressed because it is too large.


@@ -3,20 +3,16 @@
- [C# - NuGet](#c---nuget) - [C# - NuGet](#c---nuget)
- [Elixir - Mix](#elixir---mix) - [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules) - [Go - Modules](#go---modules)
- [Haskell - Cabal](#haskell---cabal)
- [Java - Gradle](#java---gradle) - [Java - Gradle](#java---gradle)
- [Java - Maven](#java---maven) - [Java - Maven](#java---maven)
- [Node - npm](#node---npm) - [Node - npm](#node---npm)
- [Node - Yarn](#node---yarn) - [Node - Yarn](#node---yarn)
- [PHP - Composer](#php---composer) - [PHP - Composer](#php---composer)
- [Python - pip](#python---pip) - [Python - pip](#python---pip)
- [R - renv](#r---renv) - [Ruby - Gem](#ruby---gem)
- [Ruby - Bundler](#ruby---bundler)
- [Rust - Cargo](#rust---cargo) - [Rust - Cargo](#rust---cargo)
- [Scala - SBT](#scala---sbt)
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage) - [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](#swift---swift-package-manager)
## C# - NuGet ## C# - NuGet
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
@@ -66,28 +62,6 @@ steps:
${{ runner.os }}-go- ${{ runner.os }}-go-
``` ```
## Haskell - Cabal
We cache the elements of the Cabal store separately, as the entirety of `~/.cabal` can grow very large for projects with many dependencies.
```yaml
- uses: actions/cache@v1
name: Cache ~/.cabal/packages
with:
path: ~/.cabal/packages
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-packages
- uses: actions/cache@v1
name: Cache ~/.cabal/store
with:
path: ~/.cabal/store
key: ${{ runner.os }}-${{ matrix.ghc }}-cabal-store
- uses: actions/cache@v1
name: Cache dist-newstyle
with:
path: dist-newstyle
key: ${{ runner.os }}-${{ matrix.ghc }}-dist-newstyle
```
## Java - Gradle ## Java - Gradle
```yaml ```yaml
@@ -250,64 +224,15 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu.
${{ runner.os }}-pip- ${{ runner.os }}-pip-
``` ```
## R - renv ## Ruby - Gem
For renv, the cache directory will vary by OS. Look at https://rstudio.github.io/renv/articles/renv.html#cache
Locations:
- Ubuntu: `~/.local/share/renv`
- macOS: `~/Library/Application Support/renv`
- Windows: `%LOCALAPPDATA%/renv`
### Simple example
```yaml
- uses: actions/cache@v1
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
Replace `~/.local/share/renv` with the correct `path` if not using Ubuntu.
### Multiple OS's in a workflow
```yaml
- uses: actions/cache@v1
if: startsWith(runner.os, 'Linux')
with:
path: ~/.local/share/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'macOS')
with:
path: ~/Library/Application Support/renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
- uses: actions/cache@v1
if: startsWith(runner.os, 'Windows')
with:
path: ~\AppData\Local\renv
key: ${{ runner.os }}-renv-${{ hashFiles('**/renv.lock') }}
restore-keys: |
${{ runner.os }}-renv-
```
## Ruby - Bundler
```yaml ```yaml
- uses: actions/cache@v1 - uses: actions/cache@v1
with: with:
path: vendor/bundle path: vendor/bundle
key: ${{ runner.os }}-gems-${{ hashFiles('**/Gemfile.lock') }} key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }}
restore-keys: | restore-keys: |
${{ runner.os }}-gems- ${{ runner.os }}-gem-
``` ```
When dependencies are installed later in the workflow, we must specify the same path for the bundler. When dependencies are installed later in the workflow, we must specify the same path for the bundler.
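The install step referenced here is elided between hunks; under the cached `vendor/bundle` path above, it would look something like the sketch below (the exact `bundle config`/`bundle install` flags are an assumption, not shown in this diff):

```yaml
- name: Bundle install
  run: |
    # Point Bundler at the same directory that was cached above.
    bundle config path vendor/bundle
    bundle install --jobs 4 --retry 3
```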
@@ -338,21 +263,6 @@ When dependencies are installed later in the workflow, we must specify the same
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
``` ```
## Scala - SBT
```yaml
- name: Cache SBT ivy cache
uses: actions/cache@v1
with:
path: ~/.ivy2/cache
key: ${{ runner.os }}-sbt-ivy-cache-${{ hashFiles('**/build.sbt') }}
- name: Cache SBT
uses: actions/cache@v1
with:
path: ~/.sbt
key: ${{ runner.os }}-sbt-${{ hashFiles('**/build.sbt') }}
```
## Swift, Objective-C - Carthage ## Swift, Objective-C - Carthage
```yaml ```yaml
@@ -374,14 +284,3 @@ When dependencies are installed later in the workflow, we must specify the same
restore-keys: | restore-keys: |
${{ runner.os }}-pods- ${{ runner.os }}-pods-
``` ```
## Swift - Swift Package Manager
```yaml
- uses: actions/cache@v1
with:
path: .build
key: ${{ runner.os }}-spm-${{ hashFiles('**/Package.resolved') }}
restore-keys: |
${{ runner.os }}-spm-
```

package-lock.json generated (67 lines changed)

@@ -1,46 +1,19 @@
{ {
"name": "cache", "name": "cache",
"version": "1.2.0", "version": "1.0.3",
"lockfileVersion": 1, "lockfileVersion": 1,
"requires": true, "requires": true,
"dependencies": { "dependencies": {
"@actions/core": { "@actions/core": {
"version": "1.10.0", "version": "1.2.0",
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.0.tgz",
"integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", "integrity": "sha512-ZKdyhlSlyz38S6YFfPnyNgCDZuAF2T0Qv5eHflNWytPS8Qjvz39bZFMry9Bb/dpSnqWcNeav5yM2CTYpJeY+Dw=="
"requires": {
"@actions/http-client": "^2.0.1",
"uuid": "^8.3.2"
},
"dependencies": {
"@actions/http-client": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.1.0.tgz",
"integrity": "sha512-BonhODnXr3amchh4qkmjPMUO8mFi/zLaaCeCAJZqch8iQqyDnVIkySjB38VHAC8IJ+bnlgfOqlhpyCUZHlQsqw==",
"requires": {
"tunnel": "^0.0.6"
}
},
"uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
}
}
}, },
"@actions/exec": { "@actions/exec": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.1.tgz", "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.0.1.tgz",
"integrity": "sha512-nvFkxwiicvpzNiCBF4wFBDfnBvi7xp/as7LE1hBxBxKG2L29+gkIPBiLKMVORL+Hg3JNf07AKRfl0V5djoypjQ==" "integrity": "sha512-nvFkxwiicvpzNiCBF4wFBDfnBvi7xp/as7LE1hBxBxKG2L29+gkIPBiLKMVORL+Hg3JNf07AKRfl0V5djoypjQ=="
}, },
"@actions/http-client": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.6.tgz",
"integrity": "sha512-LGmio4w98UyGX33b/W6V6Nx/sQHRXZ859YlMkn36wPsXPB82u8xTVlA/Dq2DXrm6lEq9RVmisRJa1c+HETAIJA==",
"requires": {
"tunnel": "0.0.6"
}
},
"@actions/io": { "@actions/io": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.1.tgz", "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.0.1.tgz",
@@ -2881,9 +2854,9 @@
"dev": true "dev": true
}, },
"handlebars": { "handlebars": {
"version": "4.5.3", "version": "4.5.1",
"resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.3.tgz", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.1.tgz",
"integrity": "sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA==", "integrity": "sha512-C29UoFzHe9yM61lOsIlCE5/mQVGrnIOrOq7maQl76L7tYPCgC1og0Ajt6uWnX4ZTxBPnjw+CUvawphwCfJgUnA==",
"dev": true, "dev": true,
"requires": { "requires": {
"neo-async": "^2.6.0", "neo-async": "^2.6.0",
@@ -5960,9 +5933,9 @@
} }
}, },
"tunnel": { "tunnel": {
"version": "0.0.6", "version": "0.0.4",
"resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz",
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" "integrity": "sha1-LTeFoVjBdMmhbcLARuxfxfF0IhM="
}, },
"tunnel-agent": { "tunnel-agent": {
"version": "0.6.0", "version": "0.6.0",
@@ -6000,6 +5973,15 @@
"integrity": "sha512-DWkS49EQKVX//Tbupb9TFa19c7+MK1XmzkrZUR8TAktmE/DizXoaoJV6TZ/tSIPXipqNiRI6CyAe7x69Jb6RSw==", "integrity": "sha512-DWkS49EQKVX//Tbupb9TFa19c7+MK1XmzkrZUR8TAktmE/DizXoaoJV6TZ/tSIPXipqNiRI6CyAe7x69Jb6RSw==",
"dev": true "dev": true
}, },
"typed-rest-client": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.5.0.tgz",
"integrity": "sha512-DVZRlmsfnTjp6ZJaatcdyvvwYwbWvR4YDNFDqb+qdTxpvaVP99YCpBkA8rxsLtAPjBVoDe4fNsnMIdZTiPuKWg==",
"requires": {
"tunnel": "0.0.4",
"underscore": "1.8.3"
}
},
"typescript": { "typescript": {
"version": "3.7.3", "version": "3.7.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz", "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz",
@@ -6007,9 +5989,9 @@
"dev": true "dev": true
}, },
"uglify-js": { "uglify-js": {
"version": "3.7.3", "version": "3.6.7",
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.7.3.tgz", "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.7.tgz",
"integrity": "sha512-7tINm46/3puUA4hCkKYo4Xdts+JDaVC9ZPRcG8Xw9R4nhO/gZgUM3TENq8IF4Vatk8qCig4MzP/c8G4u2BkVQg==", "integrity": "sha512-4sXQDzmdnoXiO+xvmTzQsfIiwrjUCSA95rSP4SEd8tDb51W2TiDOlL76Hl+Kw0Ie42PSItCW8/t6pBNCF2R48A==",
"dev": true, "dev": true,
"optional": true, "optional": true,
"requires": { "requires": {
@@ -6017,6 +5999,11 @@
"source-map": "~0.6.1" "source-map": "~0.6.1"
} }
}, },
"underscore": {
"version": "1.8.3",
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz",
"integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI="
},
"union-value": { "union-value": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz",


@@ -1,6 +1,6 @@
{ {
"name": "cache", "name": "cache",
"version": "1.2.0", "version": "1.0.3",
"private": true, "private": true,
"description": "Cache dependencies and build outputs", "description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js", "main": "dist/restore/index.js",
@@ -24,10 +24,10 @@
"author": "GitHub", "author": "GitHub",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@actions/core": "^1.10.0", "@actions/core": "^1.2.0",
"@actions/exec": "^1.0.1", "@actions/exec": "^1.0.1",
"@actions/http-client": "^1.0.6",
"@actions/io": "^1.0.1", "@actions/io": "^1.0.1",
"typed-rest-client": "^1.5.0",
"uuid": "^3.3.3" "uuid": "^3.3.3"
}, },
"devDependencies": { "devDependencies": {


@@ -1,66 +1,26 @@
import * as core from "@actions/core"; import * as core from "@actions/core";
import { HttpClient, HttpCodes } from "@actions/http-client";
import { BearerCredentialHandler } from "@actions/http-client/auth";
import {
IHttpClientResponse,
IRequestOptions,
ITypedResponse
} from "@actions/http-client/interfaces";
import * as fs from "fs"; import * as fs from "fs";
import * as stream from "stream"; import { BearerCredentialHandler } from "typed-rest-client/Handlers";
import * as util from "util"; import { HttpClient } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import { IRequestOptions, RestClient } from "typed-rest-client/RestClient";
import { ArtifactCacheEntry } from "./contracts";
import { SocketTimeout } from "./constants"; function getCacheUrl(): string {
import {
ArtifactCacheEntry,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";
function isSuccessStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
return statusCode >= 200 && statusCode < 300;
}
function isServerErrorStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return true;
}
return statusCode >= 500;
}
function isRetryableStatusCode(statusCode?: number): boolean {
if (!statusCode) {
return false;
}
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl(resource: string): string {
// Ideally we just use ACTIONS_CACHE_URL // Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = ( const cacheUrl: string = (
process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_CACHE_URL"] ||
process.env["ACTIONS_RUNTIME_URL"] || process.env["ACTIONS_RUNTIME_URL"] ||
"" ""
).replace("pipelines", "artifactcache"); ).replace("pipelines", "artifactcache");
if (!baseUrl) { if (!cacheUrl) {
throw new Error( throw new Error(
"Cache Service Url not found, unable to restore cache." "Cache Service Url not found, unable to restore cache."
); );
} }
const url = `${baseUrl}_apis/artifactcache/${resource}`; core.debug(`Cache Url: ${cacheUrl}`);
core.debug(`Resource Url: ${url}`); return cacheUrl;
return url;
} }
function createAcceptHeader(type: string, apiVersion: string): string { function createAcceptHeader(type: string, apiVersion: string): string {
@@ -69,116 +29,42 @@ function createAcceptHeader(type: string, apiVersion: string): string {
function getRequestOptions(): IRequestOptions { function getRequestOptions(): IRequestOptions {
const requestOptions: IRequestOptions = { const requestOptions: IRequestOptions = {
headers: { acceptHeader: createAcceptHeader("application/json", "5.2-preview.1")
Accept: createAcceptHeader("application/json", "6.0-preview.1")
}
}; };
return requestOptions; return requestOptions;
} }
function createHttpClient(): HttpClient {
const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
return new HttpClient(
"actions/cache",
[bearerCredentialHandler],
getRequestOptions()
);
}
export async function retry<T>(
name: string,
method: () => Promise<T>,
getStatusCode: (T) => number | undefined,
maxAttempts = 2
): Promise<T> {
let response: T | undefined = undefined;
let statusCode: number | undefined = undefined;
let isRetryable = false;
let errorMessage = "";
let attempt = 1;
while (attempt <= maxAttempts) {
try {
response = await method();
statusCode = getStatusCode(response);
if (!isServerErrorStatusCode(statusCode)) {
return response;
}
isRetryable = isRetryableStatusCode(statusCode);
errorMessage = `Cache service responded with ${statusCode}`;
} catch (error) {
isRetryable = true;
errorMessage = error.message;
}
core.debug(
`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`
);
if (!isRetryable) {
core.debug(`${name} - Error is not retryable`);
break;
}
attempt++;
}
throw Error(`${name} failed: ${errorMessage}`);
}
export async function retryTypedResponse<T>(
name: string,
method: () => Promise<ITypedResponse<T>>,
maxAttempts = 2
): Promise<ITypedResponse<T>> {
return await retry(
name,
method,
(response: ITypedResponse<T>) => response.statusCode,
maxAttempts
);
}
export async function retryHttpClientResponse<T>(
name: string,
method: () => Promise<IHttpClientResponse>,
maxAttempts = 2
): Promise<IHttpClientResponse> {
return await retry(
name,
method,
(response: IHttpClientResponse) => response.message.statusCode,
maxAttempts
);
}
export async function getCacheEntry( export async function getCacheEntry(
keys: string[] keys: string[]
): Promise<ArtifactCacheEntry | null> { ): Promise<ArtifactCacheEntry | null> {
const httpClient = createHttpClient(); const cacheUrl = getCacheUrl();
const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
const response = await retryTypedResponse("getCacheEntry", () => const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(
httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource)) keys.join(",")
)}`;
const restClient = new RestClient("actions/cache", cacheUrl, [
bearerCredentialHandler
]);
const response = await restClient.get<ArtifactCacheEntry>(
resource,
getRequestOptions()
); );
if (response.statusCode === 204) { if (response.statusCode === 204) {
return null; return null;
} }
if (!isSuccessStatusCode(response.statusCode)) { if (response.statusCode !== 200) {
throw new Error(`Cache service responded with ${response.statusCode}`); throw new Error(`Cache service responded with ${response.statusCode}`);
} }
const cacheResult = response.result; const cacheResult = response.result;
const cacheDownloadUrl = cacheResult?.archiveLocation; if (!cacheResult || !cacheResult.archiveLocation) {
if (!cacheDownloadUrl) {
throw new Error("Cache not found."); throw new Error("Cache not found.");
} }
core.setSecret(cacheDownloadUrl); core.setSecret(cacheResult.archiveLocation);
core.debug(`Cache Result:`); core.debug(`Cache Result:`);
core.debug(JSON.stringify(cacheResult)); core.debug(JSON.stringify(cacheResult));
@@ -187,210 +73,56 @@ export async function getCacheEntry(
async function pipeResponseToStream( async function pipeResponseToStream(
response: IHttpClientResponse, response: IHttpClientResponse,
output: NodeJS.WritableStream stream: NodeJS.WritableStream
): Promise<void> { ): Promise<void> {
const pipeline = util.promisify(stream.pipeline); return new Promise(resolve => {
await pipeline(response.message, output); response.message.pipe(stream).on("close", () => {
resolve();
});
});
} }
export async function downloadCache( export async function downloadCache(
archiveLocation: string, cacheEntry: ArtifactCacheEntry,
archivePath: string archivePath: string
): Promise<void> { ): Promise<void> {
const stream = fs.createWriteStream(archivePath); const stream = fs.createWriteStream(archivePath);
const httpClient = new HttpClient("actions/cache"); const httpClient = new HttpClient("actions/cache");
const downloadResponse = await retryHttpClientResponse( // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
"downloadCache", const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!);
() => httpClient.get(archiveLocation)
);
// Abort download if no traffic received over the socket.
downloadResponse.message.socket.setTimeout(SocketTimeout, () => {
downloadResponse.message.destroy();
core.debug(
`Aborting download, socket timed out after ${SocketTimeout} ms`
);
});
await pipeResponseToStream(downloadResponse, stream); await pipeResponseToStream(downloadResponse, stream);
// Validate download size.
const contentLengthHeader =
downloadResponse.message.headers["content-length"];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSize(archivePath);
if (actualLength != expectedLength) {
throw new Error(
`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`
);
}
} else {
core.debug("Unable to validate download, no Content-Length header");
}
}
// Reserve Cache
export async function reserveCache(key: string): Promise<number> {
const httpClient = createHttpClient();
const reserveCacheRequest: ReserveCacheRequest = {
key
};
const response = await retryTypedResponse("reserveCache", () =>
httpClient.postJson<ReserveCacheResponse>(
getCacheApiUrl("caches"),
reserveCacheRequest
)
);
return response?.result?.cacheId ?? -1;
}
function getContentRange(start: number, end: number): string {
// Format: `bytes start-end/filesize
// start and end are inclusive
// filesize can be *
// For a 200 byte chunk starting at byte 0:
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
async function uploadChunk(
httpClient: HttpClient,
resourceUrl: string,
openStream: () => NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
core.debug(
`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(
start,
end
)}`
);
const additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
};
await retryHttpClientResponse(
`uploadChunk (start: ${start}, end: ${end})`,
() =>
httpClient.sendStream(
"PATCH",
resourceUrl,
openStream(),
additionalHeaders
)
);
}
function parseEnvNumber(key: string): number | undefined {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
async function uploadFile(
httpClient: HttpClient,
cacheId: number,
archivePath: string
): Promise<void> {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, "r");
const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE =
parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize = Math.min(
fileSize - offset,
MAX_CHUNK_SIZE
);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
await uploadChunk(
httpClient,
resourceUrl,
() =>
fs
.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
})
.on("error", error => {
throw new Error(
`Cache upload failed because file read failed with ${error.Message}`
);
}),
start,
end
);
}
})
);
} finally {
fs.closeSync(fd);
}
return;
}
async function commitCache(
httpClient: HttpClient,
cacheId: number,
filesize: number
): Promise<ITypedResponse<null>> {
const commitCacheRequest: CommitCacheRequest = { size: filesize };
return await retryTypedResponse("commitCache", () =>
httpClient.postJson<null>(
getCacheApiUrl(`caches/${cacheId.toString()}`),
commitCacheRequest
)
);
} }
export async function saveCache( export async function saveCache(
cacheId: number, key: string,
archivePath: string archivePath: string
): Promise<void> { ): Promise<void> {
const httpClient = createHttpClient(); const stream = fs.createReadStream(archivePath);
core.debug("Upload cache"); const cacheUrl = getCacheUrl();
await uploadFile(httpClient, cacheId, archivePath); const token = process.env["ACTIONS_RUNTIME_TOKEN"] || "";
const bearerCredentialHandler = new BearerCredentialHandler(token);
// Commit Cache const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`;
core.debug("Commiting cache"); const postUrl = cacheUrl + resource;
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = await commitCache( const restClient = new RestClient("actions/cache", undefined, [
httpClient, bearerCredentialHandler
cacheId, ]);
cacheSize
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream"
};
const response = await restClient.uploadStream<void>(
"POST",
postUrl,
stream,
requestOptions
); );
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { if (response.statusCode !== 200) {
throw new Error( throw new Error(`Cache service responded with ${response.statusCode}`);
`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`
);
} }
core.info("Cache saved successfully"); core.info("Cache saved successfully");


@@ -18,8 +18,3 @@ export enum Events {
Push = "push", Push = "push",
PullRequest = "pull_request" PullRequest = "pull_request"
} }
// Socket timeout in milliseconds during download. If no traffic is received
// over the socket during this period, the socket is destroyed and the download
// is aborted.
export const SocketTimeout = 5000;

src/contracts.d.ts vendored (13 lines changed)

@@ -4,16 +4,3 @@ export interface ArtifactCacheEntry {
creationTime?: string; creationTime?: string;
archiveLocation?: string; archiveLocation?: string;
} }
export interface CommitCacheRequest {
size: number;
}
export interface ReserveCacheRequest {
key: string;
version?: string;
}
export interface ReserveCacheResponse {
cacheId: number;
}


@@ -60,7 +60,7 @@ async function run(): Promise<void> {
try { try {
const cacheEntry = await cacheHttpClient.getCacheEntry(keys); const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
if (!cacheEntry?.archiveLocation) { if (!cacheEntry) {
core.info( core.info(
`Cache not found for input keys: ${keys.join(", ")}.` `Cache not found for input keys: ${keys.join(", ")}.`
); );
@@ -77,10 +77,7 @@ async function run(): Promise<void> {
utils.setCacheState(cacheEntry); utils.setCacheState(cacheEntry);
// Download the cache from the cache entry // Download the cache from the cache entry
await cacheHttpClient.downloadCache( await cacheHttpClient.downloadCache(cacheEntry, archivePath);
cacheEntry.archiveLocation,
archivePath
);
const archiveFileSize = utils.getArchiveFileSize(archivePath); const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info( core.info(


@@ -34,15 +34,6 @@ async function run(): Promise<void> {
return; return;
} }
core.debug("Reserving Cache");
const cacheId = await cacheHttpClient.reserveCache(primaryKey);
if (cacheId == -1) {
core.info(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
return;
}
core.debug(`Cache ID: ${cacheId}`);
const cachePath = utils.resolvePath( const cachePath = utils.resolvePath(
core.getInput(Inputs.Path, { required: true }) core.getInput(Inputs.Path, { required: true })
); );
@@ -56,20 +47,19 @@ async function run(): Promise<void> {
await createTar(archivePath, cachePath); await createTar(archivePath, cachePath);
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit const fileSizeLimit = 400 * 1024 * 1024; // 400MB
const archiveFileSize = utils.getArchiveFileSize(archivePath); const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.debug(`File Size: ${archiveFileSize}`); core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) { if (archiveFileSize > fileSizeLimit) {
utils.logWarning( utils.logWarning(
`Cache size of ~${Math.round( `Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024) archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.` )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`
); );
return; return;
} }
core.debug(`Saving Cache (ID: ${cacheId})`); await cacheHttpClient.saveCache(primaryKey, archivePath);
await cacheHttpClient.saveCache(cacheId, archivePath);
} catch (error) { } catch (error) {
utils.logWarning(error.message); utils.logWarning(error.message);
} }


@@ -1,36 +1,14 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec"; import { exec } from "@actions/exec";
import * as io from "@actions/io"; import * as io from "@actions/io";
import { existsSync } from "fs"; import { existsSync } from "fs";
import * as path from "path";
import * as tar from "./tar";
export async function isGnuTar(): Promise<boolean> { async function getTarPath(): Promise<string> {
core.debug("Checking tar --version");
let versionOutput = "";
await exec("tar --version", [], {
ignoreReturnCode: true,
silent: true,
listeners: {
stdout: (data: Buffer): string =>
(versionOutput += data.toString()),
stderr: (data: Buffer): string => (versionOutput += data.toString())
}
});
core.debug(versionOutput.trim());
return versionOutput.toUpperCase().includes("GNU TAR");
}
async function getTarPath(args: string[]): Promise<string> {
// Explicitly use BSD Tar on Windows // Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32"; const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) { if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (existsSync(systemTar)) { if (existsSync(systemTar)) {
return systemTar; return systemTar;
} else if (await tar.isGnuTar()) {
args.push("--force-local");
} }
} }
return await io.which("tar", true); return await io.which("tar", true);
@@ -38,8 +16,16 @@ async function getTarPath(args: string[]): Promise<string> {
async function execTar(args: string[]): Promise<void> { async function execTar(args: string[]): Promise<void> {
try { try {
await exec(`"${await getTarPath(args)}"`, args); const tarPath = await getTarPath();
const tarExec = process.platform !== "win32" ? `sudo ${tarPath}` : tarPath;
await exec(`"${tarExec}"`, args);
} catch (error) { } catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(
`Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.`
);
}
throw new Error(`Tar failed with error: ${error?.message}`); throw new Error(`Tar failed with error: ${error?.message}`);
} }
} }
@@ -50,13 +36,7 @@ export async function extractTar(
): Promise<void> { ): Promise<void> {
// Create directory to extract tar into // Create directory to extract tar into
await io.mkdirP(targetDirectory); await io.mkdirP(targetDirectory);
const args = [ const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
"-xz",
"-f",
archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-C",
targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/")
];
await execTar(args); await execTar(args);
} }
@@ -64,13 +44,6 @@ export async function createTar(
archivePath: string, archivePath: string,
sourceDirectory: string sourceDirectory: string
): Promise<void> { ): Promise<void> {
const args = [ const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
"-cz",
"-f",
archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"),
"-C",
sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"),
"."
];
await execTar(args); await execTar(args);
} }