Compare commits

...

25 Commits

SHA1 Message Date
a7c34adf76 Merge pull request #894 from actions/kotewar/update-toolkit-version
Fix for the download stuck problem
2022-08-11 14:56:44 +05:30
83394c99b7 Updated cache version in license file 2022-08-11 07:59:07 +00:00
e839c25979 Updated actions/cache version to 3.0.3 2022-08-11 07:57:29 +00:00
33a923d660 Added release information 2022-08-09 16:48:03 +00:00
a404368986 Updated actions/cache version to 3.0.2 2022-08-09 16:43:43 +00:00
f4278025ab Merge pull request #887 from actions/pdotl-version-patch
Update toolkit's cache npm module to latest. Bump cache version to v3.0.6
2022-08-05 12:38:26 +05:30
9916fe1701 Update cache version in licences 2022-08-04 10:46:56 +00:00
318935ef66 Update README and RELEASES 2022-08-04 10:37:56 +00:00
85efbb58b9 Update cache npm module to latest 2022-08-04 10:00:08 +00:00
4387dbc81a Merge pull request #835 from shivamarora1/clojure-lein-example
feat: 🎸 add example for clojure lein project deps
2022-08-01 13:27:13 +05:30
71e3ee5cce Add phantsure to PR reviewers 2022-07-28 00:10:09 +05:30
c316eb7911 Add phantsure to issue assignment 2022-07-28 00:09:39 +05:30
0865c47f36 new release (#855) 2022-07-13 10:27:51 +05:30
354a2ae15e Consuming 3.0 actions/cache (#834)
* Consuming 3.0 actions/cache

* formatting and error

* updated package version

* resolve package

* dist

* review comment

* dist

* dist
2022-07-07 21:56:17 +05:30
baed3516c3 Merge pull request #530 from axelson/document-where-to-cache
Make it more obvious that the cache call does double duty
2022-06-27 10:56:37 +05:30
8829e97be1 Update README.md
Co-authored-by: Lucas Costi <lucascosti@users.noreply.github.com>
2022-06-27 10:48:52 +05:30
eec8cd3f5f Merge pull request #836 from actions/vsvipul/fix-auto-assign
Add kotewar and remove phantsure from auto-assignees lists
2022-06-26 20:03:54 +05:30
5cc84c0123 Add kotewar and remove phantsure from auto-assignees lists 2022-06-26 05:26:49 +00:00
a0e530f115 feat: 🎸 add example for clojure lein project deps
In Clojure, the Lein tool is used to generate templates for various projects.
Lein project metadata (including project dependencies) is stored in the
project.clj file (in the root directory). Lein downloads dependencies into
the local classpath repository (~/.m2/repository), so here I am caching the
~/.m2/repository path to reuse the cache in subsequent builds (see the
examples.md diff below).
2022-06-26 00:39:24 +05:30
afc669e7fc Merge pull request #819 from mpilgrem/haskell-stack-example
Adapt existing Haskell Stack example for Windows
2022-06-23 16:13:06 +05:30
a0efc56c52 Use pull_request_target instead of pull_request
Fixes autoassign reviewer for PRs from forked repos.
2022-06-21 22:38:08 +05:30
d25c51bbfd Adapt existing Haskell Stack example for Windows
The default `STACK_ROOT` is `~/.stack` only on Unix-like operating systems. On Windows, the default is `%APPDATA%\stack` (usually `%HOME%\AppData\Roaming\stack`).

On Unix-like OSs, Stack stores GHC and other tools in a `programs` directory in the `STACK_ROOT`. On Windows, Stack stores those tools and MSYS2 in `%LOCALAPPDATA%\Programs\stack` (usually `%HOME%\AppData\Local\Programs\stack`).
2022-06-21 13:15:08 +01:00
a080a3bda4 Merge pull request #816 from lobis/main
Fixed bad yaml in README example
2022-06-21 09:55:20 +05:30
02be3a9c73 fixed bad yaml in README example 2022-06-08 21:43:57 +02:00
2086306d9c Make it more obvious that the cache call does double duty 2021-02-06 13:36:42 -10:00
15 changed files with 292 additions and 204 deletions

.github/auto_assign.yml

@@ -7,6 +7,7 @@ addAssignees: false
# A list of reviewers to be added to pull requests (GitHub user name)
reviewers:
- phantsure
- kotewar
- aparna-ravindra
- tiwarishub
- vsvipul

.github/workflows/auto-assign-issues.yml

@@ -11,5 +11,5 @@ jobs:
- name: 'Auto-assign issue'
uses: pozil/auto-assign-issue@v1.4.0
with:
assignees: phantsure,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
assignees: phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
numOfAssignee: 1

.github/workflows/auto-assign.yml

@@ -1,6 +1,6 @@
name: 'Auto Assign'
on:
pull_request:
pull_request_target:
types: [opened, ready_for_review]
jobs:

.licenses/npm/@actions/cache.dep.yml

@@ -1,6 +1,6 @@
---
name: "@actions/cache"
version: 2.0.6
version: 3.0.3
type: npm
summary:
homepage:

README.md

@@ -15,6 +15,9 @@ See ["Caching dependencies to speed up workflows"](https://help.github.com/githu
* Updated the minimum runner version support from node 12 -> node 16.
* Fixed avoiding empty cache save when no files are available for caching.
* Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`.
* Fixed zstd failing on Amazon Linux 2.0 runners.
* Fixed cache not working with the GitHub workspace directory or the current directory.
* Fixed the download stuck problem by introducing a timeout of 1 hour for cache downloads.
Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions.
@@ -72,6 +75,8 @@ jobs:
run: /primes.sh -d prime-numbers
```
> Note: You must use the `cache` action in your workflow before you need to use the files that might be restored from the cache. If the provided `key` doesn't match an existing cache, a new cache is automatically created if the job completes successfully.
## Implementation Examples
Every programming language and framework has its own way of caching.
@@ -79,6 +84,7 @@ Every programming language and framework has its own way of caching.
See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
- [C# - NuGet](./examples.md#c---nuget)
- [Clojure - Lein Deps](./examples.md#clojure---lein-deps)
- [D - DUB](./examples.md#d---dub)
- [Deno](./examples.md#deno)
- [Elixir - Mix](./examples.md#elixir---mix)
@@ -172,33 +178,33 @@ jobs:
build-linux:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v3
- name: Cache Primes
id: cache-primes
uses: actions/cache@v3
with:
path: prime-numbers
key: primes
- name: Cache Primes
id: cache-primes
uses: actions/cache@v3
with:
path: prime-numbers
key: primes
- name: Generate Prime Numbers
if: steps.cache-primes.outputs.cache-hit != 'true'
run: ./generate-primes.sh -d prime-numbers
- name: Cache Numbers
id: cache-numbers
uses: actions/cache@v3
with:
path: numbers
key: primes
- name: Generate Prime Numbers
if: steps.cache-primes.outputs.cache-hit != 'true'
run: ./generate-primes.sh -d prime-numbers
- name: Generate Numbers
if: steps.cache-numbers.outputs.cache-hit != 'true'
run: ./generate-primes.sh -d numbers
build-windows:
runs-on: windows-latest
steps:
- name: Cache Numbers
id: cache-numbers
uses: actions/cache@v3
with:
path: numbers
key: primes
- name: Generate Numbers
if: steps.cache-numbers.outputs.cache-hit != 'true'
run: ./generate-primes.sh -d numbers
build-windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- name: Cache Primes

RELEASES.md

@@ -14,4 +14,14 @@
- Fixed avoiding empty cache save when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))
### 3.0.4
- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
### 3.0.5
- Removed error handling from the action by consuming the actions/cache 3.0 toolkit; cache server errors are now handled by the toolkit. ([PR](https://github.com/actions/cache/pull/834))
### 3.0.6
- Fixed [#809](https://github.com/actions/cache/issues/809) - `zstd -d: no such file or directory` error
- Fixed [#833](https://github.com/actions/cache/issues/833) - cache not working with the GitHub workspace directory
### 3.0.7
- Fixed [#810](https://github.com/actions/cache/issues/810) - the download-stuck issue. A new timeout in the download process aborts the download if it gets stuck and doesn't finish within an hour.
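
The 3.0.7 fix works by racing each segment download against a timer and aborting the transfer when the timer wins; the vendored bundles further down wire the timer to an `AbortController`. A minimal TypeScript sketch of that pattern, simplified from those bundles:

```typescript
// Race a promise against a timer. Resolves to "timeout" if the timer wins;
// the caller is then expected to abort the underlying download.
async function promiseWithTimeout<T>(
    timeoutMs: number,
    promise: Promise<T>
): Promise<T | "timeout"> {
    let timeoutHandle: ReturnType<typeof setTimeout> | undefined;
    const timeoutPromise = new Promise<"timeout">(resolve => {
        timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs);
    });
    const result = await Promise.race([promise, timeoutPromise]);
    clearTimeout(timeoutHandle); // stop the losing timer from keeping the process alive
    return result;
}
```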

__tests__/restore.test.ts

@@ -227,40 +227,6 @@ test("restore with no cache found", async () => {
);
});
test("restore with server error should fail", async () => {
const path = "node_modules";
const key = "node-test";
testUtils.setInputs({
path: path,
key
});
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const stateMock = jest.spyOn(core, "saveState");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementationOnce(() => {
throw new Error("HTTP Error Occurred");
});
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("restore with restore keys and no cache found", async () => {
const path = "node_modules";
const key = "node-test";

__tests__/save.test.ts

@@ -267,7 +267,6 @@ test("save with large cache outputs warning", async () => {
});
test("save with reserve cache failure outputs warning", async () => {
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
@@ -306,10 +305,10 @@ test("save with reserve cache failure outputs warning", async () => {
expect.anything()
);
expect(infoMock).toHaveBeenCalledWith(
expect(logWarningMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(failedMock).toHaveBeenCalledTimes(0);
});
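
These expectation changes mirror the new error model visible in the bundles below: a reserve-cache failure now surfaces as a single warning rather than an info line, and the step still does not fail (`setFailed` remains uncalled).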

dist/restore/index.js vendored

@@ -1113,7 +1113,13 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@@ -5467,6 +5473,7 @@ const util = __importStar(__webpack_require__(669));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const requestUtils_1 = __webpack_require__(899);
const abort_controller_1 = __webpack_require__(106);
/**
* Pipes the body of a HTTP response to a stream
*
@@ -5650,15 +5657,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const fd = fs.openSync(archivePath, 'w');
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
const abortSignal = controller.signal;
while (!downloadProgress.isDone()) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
abortSignal,
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
});
fs.writeFileSync(fd, result);
}));
if (result === 'timeout') {
controller.abort();
throw new Error('Aborting cache download as the download time exceeded the timeout.');
}
else if (Buffer.isBuffer(result)) {
fs.writeFileSync(fd, result);
}
}
}
finally {
@@ -5669,6 +5685,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
});
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
let timeoutHandle;
const timeoutPromise = new Promise(resolve => {
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
});
return Promise.race([promise, timeoutPromise]).then(result => {
clearTimeout(timeoutHandle);
return result;
});
});
//# sourceMappingURL=downloadUtils.js.map
/***/ }),
@@ -37272,9 +37298,9 @@ function extractTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
@@ -37305,9 +37331,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -T0 --long=30'];
return ['--use-compress-program', 'zstdmt --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -T0'];
return ['--use-compress-program', 'zstdmt'];
default:
return ['-z'];
}
@@ -37338,9 +37364,9 @@ function listTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
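
A note on the three compression hunks above: `unzstd` and `zstdmt` are the single-word aliases that zstd ships for `zstd -d` and `zstd -T0` respectively, so the commands are functionally unchanged. Passing a one-word program name avoids the `zstd -d: no such file or directory` failure from [#809](https://github.com/actions/cache/issues/809), which is reported when a tar build treats the entire `--use-compress-program` value as a single executable name.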
@@ -40789,7 +40815,8 @@ function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000
timeoutInMs: 30000,
segmentTimeoutInMs: 3600000
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
@@ -40801,10 +40828,14 @@ function getDownloadOptions(copy) {
if (typeof copy.timeoutInMs === 'number') {
result.timeoutInMs = copy.timeoutInMs;
}
if (typeof copy.segmentTimeoutInMs === 'number') {
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
}
}
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
return result;
}
exports.getDownloadOptions = getDownloadOptions;
@@ -46850,17 +46881,18 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
checkKey(key);
}
const compressionMethod = yield utils.getCompressionMethod();
// paths are needed to compute version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
let archivePath = '';
try {
// paths are needed to compute version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
if (core.isDebug()) {
@@ -46870,6 +46902,17 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, compressionMethod);
core.info('Cache restored successfully');
return cacheEntry.cacheKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Suppress all non-validation cache-related errors because caching should be optional
core.warning(`Failed to restore: ${error.message}`);
}
}
finally {
// Try to delete the archive to save space
@@ -46880,7 +46923,7 @@
core.debug(`Failed to delete archive: ${error}`);
}
}
return cacheEntry.cacheKey;
return undefined;
});
}
exports.restoreCache = restoreCache;
@@ -46898,7 +46941,7 @@ function saveCache(paths, key, options) {
checkPaths(paths);
checkKey(key);
const compressionMethod = yield utils.getCompressionMethod();
let cacheId = null;
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
@@ -46937,6 +46980,18 @@
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else {
core.warning(`Failed to save: ${typedError.message}`);
}
}
finally {
// Try to delete the archive to save space
try {
@@ -48996,31 +49051,19 @@ function run() {
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
required: true
});
try {
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
if (!cacheKey) {
core.info(`Cache not found for input keys: ${[
primaryKey,
...restoreKeys
].join(", ")}`);
return;
}
// Store the matched cache key
utils.setCacheState(cacheKey);
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheKey}`);
}
catch (error) {
const typedError = error;
if (typedError.name === cache.ValidationError.name) {
throw error;
}
else {
utils.logWarning(typedError.message);
utils.setCacheHitOutput(false);
}
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
if (!cacheKey) {
core.info(`Cache not found for input keys: ${[
primaryKey,
...restoreKeys
].join(", ")}`);
return;
}
// Store the matched cache key
utils.setCacheState(cacheKey);
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheKey}`);
}
catch (error) {
core.setFailed(error.message);
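
Taken together, these dist/restore changes move error handling into the toolkit: `restoreCache` now resolves to `undefined` on non-validation failures, and `saveCache` returns `-1` when nothing was saved, so the action's `run()` no longer wraps the calls in its own try/catch. A minimal TypeScript sketch of the resulting caller contract (the helper names here are hypothetical; the API calls match the diffs):

```typescript
import * as cache from "@actions/cache";

// Sketch of the new caller contract: server errors no longer throw
// (only ValidationError still propagates), so callers branch on the
// return value instead of catching.
async function restoreDeps(): Promise<void> {
    const cacheKey = await cache.restoreCache(["node_modules"], "node-test");
    if (!cacheKey) {
        console.log("Cache not found; continuing without it.");
        return;
    }
    console.log(`Cache restored from key: ${cacheKey}`);
}

async function saveDeps(): Promise<void> {
    const cacheId = await cache.saveCache(["node_modules"], "node-test");
    if (cacheId !== -1) {
        console.log("Cache saved with key: node-test");
    }
}
```

The new `segmentTimeoutInMs` download option (defaulting to 3600000 ms above) can likewise be overridden via `restoreCache`'s `options` argument.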

dist/save/index.js vendored

@@ -1113,7 +1113,13 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
paths.push(`${relativeFile}`);
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@@ -5467,6 +5473,7 @@ const util = __importStar(__webpack_require__(669));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const requestUtils_1 = __webpack_require__(899);
const abort_controller_1 = __webpack_require__(106);
/**
* Pipes the body of a HTTP response to a stream
*
@@ -5650,15 +5657,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const fd = fs.openSync(archivePath, 'w');
try {
downloadProgress.startDisplayTimer();
const controller = new abort_controller_1.AbortController();
const abortSignal = controller.signal;
while (!downloadProgress.isDone()) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
abortSignal,
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
});
fs.writeFileSync(fd, result);
}));
if (result === 'timeout') {
controller.abort();
throw new Error('Aborting cache download as the download time exceeded the timeout.');
}
else if (Buffer.isBuffer(result)) {
fs.writeFileSync(fd, result);
}
}
}
finally {
@@ -5669,6 +5685,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
});
}
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
let timeoutHandle;
const timeoutPromise = new Promise(resolve => {
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
});
return Promise.race([promise, timeoutPromise]).then(result => {
clearTimeout(timeoutHandle);
return result;
});
});
//# sourceMappingURL=downloadUtils.js.map
/***/ }),
@@ -37272,9 +37298,9 @@ function extractTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
@@ -37305,9 +37331,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -T0 --long=30'];
return ['--use-compress-program', 'zstdmt --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -T0'];
return ['--use-compress-program', 'zstdmt'];
default:
return ['-z'];
}
@@ -37338,9 +37364,9 @@ function listTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstd -d --long=30'];
return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstd -d'];
return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
@@ -40789,7 +40815,8 @@ function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
downloadConcurrency: 8,
timeoutInMs: 30000
timeoutInMs: 30000,
segmentTimeoutInMs: 3600000
};
if (copy) {
if (typeof copy.useAzureSdk === 'boolean') {
@@ -40801,10 +40828,14 @@ function getDownloadOptions(copy) {
if (typeof copy.timeoutInMs === 'number') {
result.timeoutInMs = copy.timeoutInMs;
}
if (typeof copy.segmentTimeoutInMs === 'number') {
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
}
}
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
return result;
}
exports.getDownloadOptions = getDownloadOptions;
@@ -46792,24 +46823,12 @@ function run() {
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
required: true
});
try {
yield cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
});
const cacheId = yield cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
});
if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`);
}
catch (error) {
const typedError = error;
if (typedError.name === cache.ValidationError.name) {
throw error;
}
else if (typedError.name === cache.ReserveCacheError.name) {
core.info(typedError.message);
}
else {
utils.logWarning(typedError.message);
}
}
}
catch (error) {
utils.logWarning(error.message);
@@ -46948,17 +46967,18 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
checkKey(key);
}
const compressionMethod = yield utils.getCompressionMethod();
// paths are needed to compute version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
let archivePath = '';
try {
// paths are needed to compute version
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
compressionMethod
});
if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
// Cache not found
return undefined;
}
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
if (core.isDebug()) {
@@ -46968,6 +46988,17 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, compressionMethod);
core.info('Cache restored successfully');
return cacheEntry.cacheKey;
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else {
// Suppress all non-validation cache-related errors because caching should be optional
core.warning(`Failed to restore: ${error.message}`);
}
}
finally {
// Try to delete the archive to save space
@@ -46978,7 +47009,7 @@
core.debug(`Failed to delete archive: ${error}`);
}
}
return cacheEntry.cacheKey;
return undefined;
});
}
exports.restoreCache = restoreCache;
@@ -46996,7 +47027,7 @@ function saveCache(paths, key, options) {
checkPaths(paths);
checkKey(key);
const compressionMethod = yield utils.getCompressionMethod();
let cacheId = null;
let cacheId = -1;
const cachePaths = yield utils.resolvePaths(paths);
core.debug('Cache Paths:');
core.debug(`${JSON.stringify(cachePaths)}`);
@@ -47035,6 +47066,18 @@
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
catch (error) {
const typedError = error;
if (typedError.name === ValidationError.name) {
throw error;
}
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else {
core.warning(`Failed to save: ${typedError.message}`);
}
}
finally {
// Try to delete the archive to save space
try {

examples.md

@@ -1,6 +1,7 @@
# Examples
- [C# - NuGet](#c---nuget)
- [Clojure - Lein Deps](#clojure---lein-deps)
- [D - DUB](#d---dub)
- [POSIX](#posix)
- [Windows](#windows)
@@ -80,6 +81,19 @@ steps:
${{ runner.os }}-nuget-
```
## Clojure - Lein Deps
```yaml
- name: Cache lein project dependencies
uses: actions/cache@v3
with:
path: ~/.m2/repository
key: ${{ runner.os }}-clojure-${{ hashFiles('**/project.clj') }}
restore-keys: |
${{ runner.os }}-clojure
```
## D - DUB
### POSIX
@@ -223,6 +237,8 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
## Haskell - Stack
### Linux or macOS
```yaml
- uses: actions/cache@v3
name: Cache ~/.stack
@@ -240,6 +256,27 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
${{ runner.os }}-stack-work-
```
### Windows
```yaml
- uses: actions/cache@v3
name: Cache %APPDATA%\stack %LOCALAPPDATA%\Programs\stack
with:
path: |
~\AppData\Roaming\stack
~\AppData\Local\Programs\stack
key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
restore-keys: |
${{ runner.os }}-stack-global-
- uses: actions/cache@v3
name: Cache .stack-work
with:
path: .stack-work
key: ${{ runner.os }}-stack-work-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}-${{ hashFiles('**/*.hs') }}
restore-keys: |
${{ runner.os }}-stack-work-
```
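
A note on the Windows example above: the `path` entries use `~`, which the cache action resolves against the runner's user profile, so `~\AppData\Roaming\stack` and `~\AppData\Local\Programs\stack` land on the default `%APPDATA%\stack` and `%LOCALAPPDATA%\Programs\stack` locations described in the commit message.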
## Java - Gradle
>Note: Ensure no Gradle daemons are still running when your workflow completes. Creating the cache package might fail due to locks held by Gradle. Refer to the [Gradle Daemon documentation](https://docs.gradle.org/current/userguide/gradle_daemon.html) on how to disable or stop the Gradle daemons.

package-lock.json generated

@@ -1,15 +1,15 @@
{
"name": "cache",
"version": "3.0.4",
"version": "3.0.7",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "cache",
"version": "3.0.4",
"version": "3.0.7",
"license": "MIT",
"dependencies": {
"@actions/cache": "^2.0.6",
"@actions/cache": "^3.0.3",
"@actions/core": "^1.7.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"
@@ -36,9 +36,9 @@
}
},
"node_modules/@actions/cache": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.6.tgz",
"integrity": "sha512-Z39ZrWaTRRPaV/AOQdY7hve+Iy/HloH5prpz+k+0lZgGQs/3SeO0UYSIakVuXOk2pdMZnl0Nv0PoK1rmh9YfGQ==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.3.tgz",
"integrity": "sha512-kn0pZRQNFRg1IQnW/N7uTNbbLqYalvQW2bmrznn3C34LMY/rSuEmH6Uo69HDh335Q0vKs9kg/jsIarzUBKzEXg==",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@@ -9533,9 +9533,9 @@
},
"dependencies": {
"@actions/cache": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.6.tgz",
"integrity": "sha512-Z39ZrWaTRRPaV/AOQdY7hve+Iy/HloH5prpz+k+0lZgGQs/3SeO0UYSIakVuXOk2pdMZnl0Nv0PoK1rmh9YfGQ==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.3.tgz",
"integrity": "sha512-kn0pZRQNFRg1IQnW/N7uTNbbLqYalvQW2bmrznn3C34LMY/rSuEmH6Uo69HDh335Q0vKs9kg/jsIarzUBKzEXg==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",

package.json

@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "3.0.4",
"version": "3.0.7",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^2.0.6",
"@actions/cache": "^3.0.3",
"@actions/core": "^1.7.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"

src/restore.ts

@@ -29,38 +29,29 @@ async function run(): Promise<void> {
required: true
});
try {
const cacheKey = await cache.restoreCache(
cachePaths,
primaryKey,
restoreKeys
const cacheKey = await cache.restoreCache(
cachePaths,
primaryKey,
restoreKeys
);
if (!cacheKey) {
core.info(
`Cache not found for input keys: ${[
primaryKey,
...restoreKeys
].join(", ")}`
);
if (!cacheKey) {
core.info(
`Cache not found for input keys: ${[
primaryKey,
...restoreKeys
].join(", ")}`
);
return;
}
// Store the matched cache key
utils.setCacheState(cacheKey);
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheKey}`);
} catch (error: unknown) {
const typedError = error as Error;
if (typedError.name === cache.ValidationError.name) {
throw error;
} else {
utils.logWarning(typedError.message);
utils.setCacheHitOutput(false);
}
return;
}
// Store the matched cache key
utils.setCacheState(cacheKey);
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheKey}`);
} catch (error: unknown) {
core.setFailed((error as Error).message);
}

src/save.ts

@@ -44,20 +44,12 @@ async function run(): Promise<void> {
required: true
});
try {
await cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
});
const cacheId = await cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
});
if (cacheId != -1) {
core.info(`Cache saved with key: ${primaryKey}`);
} catch (error: unknown) {
const typedError = error as Error;
if (typedError.name === cache.ValidationError.name) {
throw error;
} else if (typedError.name === cache.ReserveCacheError.name) {
core.info(typedError.message);
} else {
utils.logWarning(typedError.message);
}
}
} catch (error: unknown) {
utils.logWarning((error as Error).message);