Mirror of https://github.com/actions/cache.git (synced 2025-06-26 04:11:10 +02:00)
Compare commits
37 Commits

| SHA1 |
|---|
| a7c34adf76 |
| 83394c99b7 |
| e839c25979 |
| 33a923d660 |
| a404368986 |
| f4278025ab |
| 9916fe1701 |
| 318935ef66 |
| 85efbb58b9 |
| 4387dbc81a |
| 71e3ee5cce |
| c316eb7911 |
| 0865c47f36 |
| 354a2ae15e |
| baed3516c3 |
| 8829e97be1 |
| eec8cd3f5f |
| 5cc84c0123 |
| a0e530f115 |
| afc669e7fc |
| a0efc56c52 |
| d25c51bbfd |
| a080a3bda4 |
| 02be3a9c73 |
| c3f1317a9e |
| d0a54b996c |
| 8c5bd0c82d |
| c9c0f73558 |
| 2b6caae3c9 |
| dd58d1315f |
| acace7fa1b |
| 438628ac22 |
| c296e6a08c |
| 7ed7f22ed8 |
| 3767bf0386 |
| 2086306d9c |
| 009fe4e01c |
.github/auto_assign.yml (vendored, 1 change)

```diff
@@ -7,6 +7,7 @@ addAssignees: false
 # A list of reviewers to be added to pull requests (GitHub user name)
 reviewers:
   - phantsure
+  - kotewar
   - aparna-ravindra
   - tiwarishub
   - vsvipul
```
.github/workflows/auto-assign-issues.yml (vendored, 2 changes)

```diff
@@ -11,5 +11,5 @@ jobs:
       - name: 'Auto-assign issue'
         uses: pozil/auto-assign-issue@v1.4.0
         with:
-          assignees: phantsure,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
+          assignees: phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
           numOfAssignee: 1
```
.github/workflows/auto-assign.yml (vendored, 2 changes)

```diff
@@ -1,6 +1,6 @@
 name: 'Auto Assign'
 on:
-  pull_request:
+  pull_request_target:
     types: [opened, ready_for_review]
 
 jobs:
```
.licenses/npm/@actions/cache.dep.yml (generated, 2 changes)

```diff
@@ -1,6 +1,6 @@
 ---
 name: "@actions/cache"
-version: 2.0.5
+version: 3.0.3
 type: npm
 summary:
 homepage:
```
README.md (10 changes)

````diff
@@ -14,6 +14,10 @@ See ["Caching dependencies to speed up workflows"](https://help.github.com/githu
 * Fixed download issue for files > 2GB during restore.
 * Updated the minimum runner version support from node 12 -> node 16.
 * Fixed avoiding empty cache save when no files are available for caching.
+* Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`.
+* Fixed zstd failing on amazon linux 2.0 runners
+* Fixed cache not working with github workspace directory or current directory
+* Fixed the download stuck problem by introducing a timeout of 1 hour for cache downloads.
 
 Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions
 
@@ -28,7 +32,8 @@ If you are using this inside a container, a POSIX-compliant `tar` needs to be in
 
 * `path` - A list of files, directories, and wildcard patterns to cache and restore. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns.
 * `key` - An explicit key for restoring and saving the cache
-* `restore-keys` - An ordered list of keys to use for restoring the cache if no cache hit occurred for key
+* `restore-keys` - An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note
+`cache-hit` returns false in this case.
 
 ### Outputs
 
@@ -70,6 +75,8 @@ jobs:
     run: /primes.sh -d prime-numbers
 ```
 
+> Note: You must use the `cache` action in your workflow before you need to use the files that might be restored from the cache. If the provided `key` doesn't match an existing cache, a new cache is automatically created if the job completes successfully.
+
 ## Implementation Examples
 
 Every programming language and framework has its own way of caching.
@@ -77,6 +84,7 @@ Every programming language and framework has its own way of caching.
 See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
 
 - [C# - NuGet](./examples.md#c---nuget)
+- [Clojure - Lein Deps](./examples.md#clojure---lein-deps)
 - [D - DUB](./examples.md#d---dub)
 - [Deno](./examples.md#deno)
 - [Elixir - Mix](./examples.md#elixir---mix)
````
RELEASES.md (13 changes)

```diff
@@ -12,3 +12,16 @@
 
 ### 3.0.3
 - Fixed avoiding empty cache save when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))
+
+### 3.0.4
+- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
+
+### 3.0.5
+- Removed error handling by consuming actions/cache 3.0 toolkit, Now cache server error handling will be done by toolkit. ([PR](https://github.com/actions/cache/pull/834))
+
+### 3.0.6
+- Fixed [#809](https://github.com/actions/cache/issues/809) - zstd -d: no such file or directory error
+- Fixed [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory
+
+### 3.0.7
+- Fixed [#810](https://github.com/actions/cache/issues/810) - download stuck issue. A new timeout is introduced in the download process to abort the download if it gets stuck and doesn't finish within an hour.
```
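The 3.0.7 fix works by racing each segment download against a timer and aborting on expiry; the compiled form appears in the dist/restore/index.js hunks further below. A typed, standalone sketch of the same pattern (names mirror the bundle; this is an illustration, not the shipped source):

```typescript
// Sketch of the Promise.race-based segment timeout behind the 3.0.7 fix.
// The bundled dist/restore/index.js below contains the compiled equivalent.
async function promiseWithTimeout<T>(
    timeoutMs: number,
    promise: Promise<T>
): Promise<T | "timeout"> {
    let timeoutHandle: ReturnType<typeof setTimeout> | undefined;
    const timeoutPromise = new Promise<"timeout">(resolve => {
        timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs);
    });
    // Whichever settles first wins; the timer is cleared either way so a
    // dangling timeout does not keep the process alive.
    const result = await Promise.race([promise, timeoutPromise]);
    clearTimeout(timeoutHandle);
    return result;
}
```

On a `"timeout"` result the caller aborts the in-flight request via an AbortController and throws, which is exactly what the downloadCacheStorageSDK hunk below does.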
__tests__/restore.test.ts

```diff
@@ -227,40 +227,6 @@ test("restore with no cache found", async () => {
     );
 });
 
-test("restore with server error should fail", async () => {
-    const path = "node_modules";
-    const key = "node-test";
-    testUtils.setInputs({
-        path: path,
-        key
-    });
-
-    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
-    const failedMock = jest.spyOn(core, "setFailed");
-    const stateMock = jest.spyOn(core, "saveState");
-    const restoreCacheMock = jest
-        .spyOn(cache, "restoreCache")
-        .mockImplementationOnce(() => {
-            throw new Error("HTTP Error Occurred");
-        });
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-
-    await run();
-
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
-
-    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-
-    expect(logWarningMock).toHaveBeenCalledTimes(1);
-    expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
-
-    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
-
-    expect(failedMock).toHaveBeenCalledTimes(0);
-});
-
 test("restore with restore keys and no cache found", async () => {
     const path = "node_modules";
     const key = "node-test";
```
__tests__/save.test.ts

```diff
@@ -267,7 +267,6 @@ test("save with large cache outputs warning", async () => {
 });
 
 test("save with reserve cache failure outputs warning", async () => {
-    const infoMock = jest.spyOn(core, "info");
     const logWarningMock = jest.spyOn(actionUtils, "logWarning");
     const failedMock = jest.spyOn(core, "setFailed");
 
@@ -306,10 +305,10 @@ test("save with reserve cache failure outputs warning", async () => {
         expect.anything()
     );
 
-    expect(infoMock).toHaveBeenCalledWith(
+    expect(logWarningMock).toHaveBeenCalledWith(
        `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
     );
-    expect(logWarningMock).toHaveBeenCalledTimes(0);
+    expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
 
```
action.yml

```diff
@@ -9,7 +9,7 @@ inputs:
     description: 'An explicit key for restoring and saving the cache'
     required: true
   restore-keys:
-    description: 'An ordered list of keys to use for restoring the cache if no cache hit occurred for key'
+    description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.'
     required: false
   upload-chunk-size:
     description: 'The chunk size used to split up large files during upload, in bytes'
```
95
dist/restore/index.js
vendored
95
dist/restore/index.js
vendored
@ -1113,9 +1113,15 @@ function resolvePaths(patterns) {
|
|||||||
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
|
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
|
||||||
core.debug(`Matched: ${relativeFile}`);
|
core.debug(`Matched: ${relativeFile}`);
|
||||||
// Paths are made relative so the tar entries are all relative to the root of the workspace.
|
// Paths are made relative so the tar entries are all relative to the root of the workspace.
|
||||||
|
if (relativeFile === '') {
|
||||||
|
// path.relative returns empty string if workspace and file are equal
|
||||||
|
paths.push('.');
|
||||||
|
}
|
||||||
|
else {
|
||||||
paths.push(`${relativeFile}`);
|
paths.push(`${relativeFile}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||||
finally {
|
finally {
|
||||||
try {
|
try {
|
||||||
@ -5467,6 +5473,7 @@ const util = __importStar(__webpack_require__(669));
|
|||||||
const utils = __importStar(__webpack_require__(15));
|
const utils = __importStar(__webpack_require__(15));
|
||||||
const constants_1 = __webpack_require__(931);
|
const constants_1 = __webpack_require__(931);
|
||||||
const requestUtils_1 = __webpack_require__(899);
|
const requestUtils_1 = __webpack_require__(899);
|
||||||
|
const abort_controller_1 = __webpack_require__(106);
|
||||||
/**
|
/**
|
||||||
* Pipes the body of a HTTP response to a stream
|
* Pipes the body of a HTTP response to a stream
|
||||||
*
|
*
|
||||||
@ -5650,17 +5657,26 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
|
|||||||
const fd = fs.openSync(archivePath, 'w');
|
const fd = fs.openSync(archivePath, 'w');
|
||||||
try {
|
try {
|
||||||
downloadProgress.startDisplayTimer();
|
downloadProgress.startDisplayTimer();
|
||||||
|
const controller = new abort_controller_1.AbortController();
|
||||||
|
const abortSignal = controller.signal;
|
||||||
while (!downloadProgress.isDone()) {
|
while (!downloadProgress.isDone()) {
|
||||||
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
|
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
|
||||||
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
|
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
|
||||||
downloadProgress.nextSegment(segmentSize);
|
downloadProgress.nextSegment(segmentSize);
|
||||||
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
|
const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
|
||||||
|
abortSignal,
|
||||||
concurrency: options.downloadConcurrency,
|
concurrency: options.downloadConcurrency,
|
||||||
onProgress: downloadProgress.onProgress()
|
onProgress: downloadProgress.onProgress()
|
||||||
});
|
}));
|
||||||
|
if (result === 'timeout') {
|
||||||
|
controller.abort();
|
||||||
|
throw new Error('Aborting cache download as the download time exceeded the timeout.');
|
||||||
|
}
|
||||||
|
else if (Buffer.isBuffer(result)) {
|
||||||
fs.writeFileSync(fd, result);
|
fs.writeFileSync(fd, result);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
finally {
|
finally {
|
||||||
downloadProgress.stopDisplayTimer();
|
downloadProgress.stopDisplayTimer();
|
||||||
fs.closeSync(fd);
|
fs.closeSync(fd);
|
||||||
@ -5669,6 +5685,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
|
exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
|
||||||
|
const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
|
||||||
|
let timeoutHandle;
|
||||||
|
const timeoutPromise = new Promise(resolve => {
|
||||||
|
timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
|
||||||
|
});
|
||||||
|
return Promise.race([promise, timeoutPromise]).then(result => {
|
||||||
|
clearTimeout(timeoutHandle);
|
||||||
|
return result;
|
||||||
|
});
|
||||||
|
});
|
||||||
//# sourceMappingURL=downloadUtils.js.map
|
//# sourceMappingURL=downloadUtils.js.map
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
@ -37272,9 +37298,9 @@ function extractTar(archivePath, compressionMethod) {
|
|||||||
function getCompressionProgram() {
|
function getCompressionProgram() {
|
||||||
switch (compressionMethod) {
|
switch (compressionMethod) {
|
||||||
case constants_1.CompressionMethod.Zstd:
|
case constants_1.CompressionMethod.Zstd:
|
||||||
return ['--use-compress-program', 'zstd -d --long=30'];
|
return ['--use-compress-program', 'unzstd --long=30'];
|
||||||
case constants_1.CompressionMethod.ZstdWithoutLong:
|
case constants_1.CompressionMethod.ZstdWithoutLong:
|
||||||
return ['--use-compress-program', 'zstd -d'];
|
return ['--use-compress-program', 'unzstd'];
|
||||||
default:
|
default:
|
||||||
return ['-z'];
|
return ['-z'];
|
||||||
}
|
}
|
||||||
@ -37305,9 +37331,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
|
|||||||
function getCompressionProgram() {
|
function getCompressionProgram() {
|
||||||
switch (compressionMethod) {
|
switch (compressionMethod) {
|
||||||
case constants_1.CompressionMethod.Zstd:
|
case constants_1.CompressionMethod.Zstd:
|
||||||
return ['--use-compress-program', 'zstd -T0 --long=30'];
|
return ['--use-compress-program', 'zstdmt --long=30'];
|
||||||
case constants_1.CompressionMethod.ZstdWithoutLong:
|
case constants_1.CompressionMethod.ZstdWithoutLong:
|
||||||
return ['--use-compress-program', 'zstd -T0'];
|
return ['--use-compress-program', 'zstdmt'];
|
||||||
default:
|
default:
|
||||||
return ['-z'];
|
return ['-z'];
|
||||||
}
|
}
|
||||||
@ -37317,6 +37343,8 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
|
|||||||
...getCompressionProgram(),
|
...getCompressionProgram(),
|
||||||
'-cf',
|
'-cf',
|
||||||
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
||||||
|
'--exclude',
|
||||||
|
cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
||||||
'-P',
|
'-P',
|
||||||
'-C',
|
'-C',
|
||||||
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
|
||||||
@ -37336,9 +37364,9 @@ function listTar(archivePath, compressionMethod) {
|
|||||||
function getCompressionProgram() {
|
function getCompressionProgram() {
|
||||||
switch (compressionMethod) {
|
switch (compressionMethod) {
|
||||||
case constants_1.CompressionMethod.Zstd:
|
case constants_1.CompressionMethod.Zstd:
|
||||||
return ['--use-compress-program', 'zstd -d --long=30'];
|
return ['--use-compress-program', 'unzstd --long=30'];
|
||||||
case constants_1.CompressionMethod.ZstdWithoutLong:
|
case constants_1.CompressionMethod.ZstdWithoutLong:
|
||||||
return ['--use-compress-program', 'zstd -d'];
|
return ['--use-compress-program', 'unzstd'];
|
||||||
default:
|
default:
|
||||||
return ['-z'];
|
return ['-z'];
|
||||||
}
|
}
|
||||||
@ -40787,7 +40815,8 @@ function getDownloadOptions(copy) {
|
|||||||
const result = {
|
const result = {
|
||||||
useAzureSdk: true,
|
useAzureSdk: true,
|
||||||
downloadConcurrency: 8,
|
downloadConcurrency: 8,
|
||||||
timeoutInMs: 30000
|
timeoutInMs: 30000,
|
||||||
|
segmentTimeoutInMs: 3600000
|
||||||
};
|
};
|
||||||
if (copy) {
|
if (copy) {
|
||||||
if (typeof copy.useAzureSdk === 'boolean') {
|
if (typeof copy.useAzureSdk === 'boolean') {
|
||||||
@ -40799,10 +40828,14 @@ function getDownloadOptions(copy) {
|
|||||||
if (typeof copy.timeoutInMs === 'number') {
|
if (typeof copy.timeoutInMs === 'number') {
|
||||||
result.timeoutInMs = copy.timeoutInMs;
|
result.timeoutInMs = copy.timeoutInMs;
|
||||||
}
|
}
|
||||||
|
if (typeof copy.segmentTimeoutInMs === 'number') {
|
||||||
|
result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
|
core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
|
||||||
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
|
core.debug(`Download concurrency: ${result.downloadConcurrency}`);
|
||||||
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
|
core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
|
||||||
|
core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
exports.getDownloadOptions = getDownloadOptions;
|
exports.getDownloadOptions = getDownloadOptions;
|
||||||
@ -46848,6 +46881,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
|
|||||||
checkKey(key);
|
checkKey(key);
|
||||||
}
|
}
|
||||||
const compressionMethod = yield utils.getCompressionMethod();
|
const compressionMethod = yield utils.getCompressionMethod();
|
||||||
|
let archivePath = '';
|
||||||
|
try {
|
||||||
// path are needed to compute version
|
// path are needed to compute version
|
||||||
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
|
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
|
||||||
compressionMethod
|
compressionMethod
|
||||||
@ -46856,9 +46891,8 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
|
|||||||
// Cache not found
|
// Cache not found
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
|
archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
|
||||||
core.debug(`Archive Path: ${archivePath}`);
|
core.debug(`Archive Path: ${archivePath}`);
|
||||||
try {
|
|
||||||
// Download the cache from the cache entry
|
// Download the cache from the cache entry
|
||||||
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
|
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
|
||||||
if (core.isDebug()) {
|
if (core.isDebug()) {
|
||||||
@ -46868,6 +46902,17 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
|
|||||||
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
|
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
|
||||||
yield tar_1.extractTar(archivePath, compressionMethod);
|
yield tar_1.extractTar(archivePath, compressionMethod);
|
||||||
core.info('Cache restored successfully');
|
core.info('Cache restored successfully');
|
||||||
|
return cacheEntry.cacheKey;
|
||||||
|
}
|
||||||
|
catch (error) {
|
||||||
|
const typedError = error;
|
||||||
|
if (typedError.name === ValidationError.name) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
// Supress all non-validation cache related errors because caching should be optional
|
||||||
|
core.warning(`Failed to restore: ${error.message}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
finally {
|
finally {
|
||||||
// Try to delete the archive to save space
|
// Try to delete the archive to save space
|
||||||
@ -46878,7 +46923,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
|
|||||||
core.debug(`Failed to delete archive: ${error}`);
|
core.debug(`Failed to delete archive: ${error}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return cacheEntry.cacheKey;
|
return undefined;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
exports.restoreCache = restoreCache;
|
exports.restoreCache = restoreCache;
|
||||||
@ -46896,7 +46941,7 @@ function saveCache(paths, key, options) {
|
|||||||
checkPaths(paths);
|
checkPaths(paths);
|
||||||
checkKey(key);
|
checkKey(key);
|
||||||
const compressionMethod = yield utils.getCompressionMethod();
|
const compressionMethod = yield utils.getCompressionMethod();
|
||||||
let cacheId = null;
|
let cacheId = -1;
|
||||||
const cachePaths = yield utils.resolvePaths(paths);
|
const cachePaths = yield utils.resolvePaths(paths);
|
||||||
core.debug('Cache Paths:');
|
core.debug('Cache Paths:');
|
||||||
core.debug(`${JSON.stringify(cachePaths)}`);
|
core.debug(`${JSON.stringify(cachePaths)}`);
|
||||||
@ -46935,6 +46980,18 @@ function saveCache(paths, key, options) {
|
|||||||
core.debug(`Saving Cache (ID: ${cacheId})`);
|
core.debug(`Saving Cache (ID: ${cacheId})`);
|
||||||
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
|
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
|
||||||
}
|
}
|
||||||
|
catch (error) {
|
||||||
|
const typedError = error;
|
||||||
|
if (typedError.name === ValidationError.name) {
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
else if (typedError.name === ReserveCacheError.name) {
|
||||||
|
core.info(`Failed to save: ${typedError.message}`);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
core.warning(`Failed to save: ${typedError.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
finally {
|
finally {
|
||||||
// Try to delete the archive to save space
|
// Try to delete the archive to save space
|
||||||
try {
|
try {
|
||||||
@ -48994,7 +49051,6 @@ function run() {
|
|||||||
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
|
const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
|
||||||
required: true
|
required: true
|
||||||
});
|
});
|
||||||
try {
|
|
||||||
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
|
const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
|
||||||
if (!cacheKey) {
|
if (!cacheKey) {
|
||||||
core.info(`Cache not found for input keys: ${[
|
core.info(`Cache not found for input keys: ${[
|
||||||
@ -49009,17 +49065,6 @@ function run() {
|
|||||||
utils.setCacheHitOutput(isExactKeyMatch);
|
utils.setCacheHitOutput(isExactKeyMatch);
|
||||||
core.info(`Cache restored from key: ${cacheKey}`);
|
core.info(`Cache restored from key: ${cacheKey}`);
|
||||||
}
|
}
|
||||||
catch (error) {
|
|
||||||
const typedError = error;
|
|
||||||
if (typedError.name === cache.ValidationError.name) {
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
utils.logWarning(typedError.message);
|
|
||||||
utils.setCacheHitOutput(false);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (error) {
|
catch (error) {
|
||||||
core.setFailed(error.message);
|
core.setFailed(error.message);
|
||||||
}
|
}
|
||||||
|
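The getDownloadOptions hunks above default `segmentTimeoutInMs` to one hour and copy a caller-supplied value, so toolkit consumers can tune the timeout. A hedged usage sketch (the `@actions/cache/lib/options` import path is an assumption based on the published package layout, not shown in this diff):

```typescript
// Illustrative only: the diff above shows the compiled getDownloadOptions;
// the import path below is assumed, not confirmed by this diff.
import { getDownloadOptions } from "@actions/cache/lib/options";

// Tighten the per-segment download timeout from the 1 h default to 10 minutes.
const options = getDownloadOptions({ segmentTimeoutInMs: 10 * 60 * 1000 });
console.log(options.segmentTimeoutInMs); // 600000
```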
dist/save/index.js (vendored, 99 changes)

Apart from the save-specific `run()` below, the bundled toolkit hunks here (resolvePaths, the abort-controller require, the segment download timeout in downloadCacheStorageSDK, promiseWithTimeout, the unzstd/zstdmt compression programs, the tar `--exclude` flag, getDownloadOptions, restoreCache, and saveCache) are identical to those shown above for dist/restore/index.js, at shifted line offsets.

```diff
@@ -46790,24 +46823,12 @@ function run() {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        try {
-            yield cache.saveCache(cachePaths, primaryKey, {
+        const cacheId = yield cache.saveCache(cachePaths, primaryKey, {
             uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
         });
+        if (cacheId != -1) {
             core.info(`Cache saved with key: ${primaryKey}`);
         }
-        catch (error) {
-            const typedError = error;
-            if (typedError.name === cache.ValidationError.name) {
-                throw error;
-            }
-            else if (typedError.name === cache.ReserveCacheError.name) {
-                core.info(typedError.message);
-            }
-            else {
-                utils.logWarning(typedError.message);
-            }
-        }
     }
     catch (error) {
         utils.logWarning(error.message);
```
examples.md
50
examples.md
@ -1,6 +1,7 @@
|
|||||||
# Examples
|
# Examples
|
||||||
|
|
||||||
- [C# - NuGet](#c---nuget)
|
- [C# - NuGet](#c---nuget)
|
||||||
|
- [Clojure - Lein Deps](#clojure---lein-deps)
|
||||||
- [D - DUB](#d---dub)
|
- [D - DUB](#d---dub)
|
||||||
- [POSIX](#posix)
|
- [POSIX](#posix)
|
||||||
- [Windows](#windows)
|
- [Windows](#windows)
|
||||||
@ -9,6 +10,7 @@
|
|||||||
- [macOS](#macos)
|
- [macOS](#macos)
|
||||||
- [Windows](#windows-1)
|
- [Windows](#windows-1)
|
||||||
- [Elixir - Mix](#elixir---mix)
|
- [Elixir - Mix](#elixir---mix)
|
||||||
|
- [Erlang - Rebar3](#erlang--rebar3)
|
||||||
- [Go - Modules](#go---modules)
|
- [Go - Modules](#go---modules)
|
||||||
- [Linux](#linux-1)
|
- [Linux](#linux-1)
|
||||||
- [macOS](#macos-1)
|
- [macOS](#macos-1)
|
||||||
@ -79,6 +81,19 @@ steps:
|
|||||||
${{ runner.os }}-nuget-
|
${{ runner.os }}-nuget-
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Clojure - Lein Deps
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- name: Cache lein project dependencies
|
||||||
|
uses: actions/cache@v3
|
||||||
|
with:
|
||||||
|
path: ~/.m2/repository
|
||||||
|
key: ${{ runner.os }}-clojure-${{ hashFiles('**/project.clj') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-clojure
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
## D - DUB
|
## D - DUB
|
||||||
|
|
||||||
### POSIX
|
### POSIX
|
||||||
@ -151,6 +166,18 @@ steps:
|
|||||||
${{ runner.os }}-mix-
|
${{ runner.os }}-mix-
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Erlang - Rebar3
|
||||||
|
```yaml
|
||||||
|
- uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~/.cache/rebar3
|
||||||
|
_build
|
||||||
|
key: ${{ runner.os }}-erlang-${{ env.OTP_VERSION }}-${{ hashFiles('**/*rebar.lock') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-erlang-${{ env.OTP_VERSION }}-
|
||||||
|
```
|
||||||
|
|
||||||
## Go - Modules
|
## Go - Modules
|
||||||
|
|
||||||
### Linux
|
### Linux
|
||||||
@ -210,6 +237,8 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||||||
|
|
||||||
## Haskell - Stack
|
## Haskell - Stack
|
||||||
|
|
||||||
|
### Linux or macOS
|
||||||
|
|
||||||
```yaml
|
```yaml
|
||||||
- uses: actions/cache@v3
|
- uses: actions/cache@v3
|
||||||
name: Cache ~/.stack
|
name: Cache ~/.stack
|
||||||
@ -227,6 +256,27 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
|
|||||||
${{ runner.os }}-stack-work-
|
${{ runner.os }}-stack-work-
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Windows
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
- uses: actions/cache@v3
|
||||||
|
name: Cache %APPDATA%\stack %LOCALAPPDATA%\Programs\stack
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
~\AppData\Roaming\stack
|
||||||
|
~\AppData\Local\Programs\stack
|
||||||
|
key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-stack-global-
|
||||||
|
- uses: actions/cache@v3
|
||||||
|
name: Cache .stack-work
|
||||||
|
with:
|
||||||
|
path: .stack-work
|
||||||
|
key: ${{ runner.os }}-stack-work-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}-${{ hashFiles('**/*.hs') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-stack-work-
|
||||||
|
```
|
||||||
|
|
||||||
## Java - Gradle
|
## Java - Gradle
|
||||||
|
|
||||||
>Note: Ensure no Gradle daemons are running anymore when your workflow completes. Creating the cache package might fail due to locks being held by Gradle. Refer to the [Gradle Daemon documentation](https://docs.gradle.org/current/userguide/gradle_daemon.html) on how to disable or stop the Gradle Daemons.
|
>Note: Ensure no Gradle daemons are running anymore when your workflow completes. Creating the cache package might fail due to locks being held by Gradle. Refer to the [Gradle Daemon documentation](https://docs.gradle.org/current/userguide/gradle_daemon.html) on how to disable or stop the Gradle Daemons.
|
||||||
|
package-lock.json (generated, 18 changes)

```diff
@@ -1,15 +1,15 @@
 {
   "name": "cache",
-  "version": "3.0.3",
+  "version": "3.0.7",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "cache",
-      "version": "3.0.3",
+      "version": "3.0.7",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "^2.0.5",
+        "@actions/cache": "^3.0.3",
         "@actions/core": "^1.7.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"
@@ -36,9 +36,9 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.5.tgz",
-      "integrity": "sha512-aG06dsgcVtiuHLJsIfwrDtvzNNJQ+Iqk8DQt1IeI6gG7ezmLaSdZkHEwA/DNrm5TtOahLcgGEo2SXqbFElVMQg==",
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.3.tgz",
+      "integrity": "sha512-kn0pZRQNFRg1IQnW/N7uTNbbLqYalvQW2bmrznn3C34LMY/rSuEmH6Uo69HDh335Q0vKs9kg/jsIarzUBKzEXg==",
       "dependencies": {
         "@actions/core": "^1.2.6",
         "@actions/exec": "^1.0.1",
@@ -9533,9 +9533,9 @@
     },
     "dependencies": {
       "@actions/cache": {
-        "version": "2.0.5",
-        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.5.tgz",
-        "integrity": "sha512-aG06dsgcVtiuHLJsIfwrDtvzNNJQ+Iqk8DQt1IeI6gG7ezmLaSdZkHEwA/DNrm5TtOahLcgGEo2SXqbFElVMQg==",
+        "version": "3.0.3",
+        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.3.tgz",
+        "integrity": "sha512-kn0pZRQNFRg1IQnW/N7uTNbbLqYalvQW2bmrznn3C34LMY/rSuEmH6Uo69HDh335Q0vKs9kg/jsIarzUBKzEXg==",
         "requires": {
           "@actions/core": "^1.2.6",
           "@actions/exec": "^1.0.1",
```
package.json

```diff
@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "3.0.3",
+  "version": "3.0.7",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^2.0.5",
+    "@actions/cache": "^3.0.3",
     "@actions/core": "^1.7.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"
```
src/restore.ts

```diff
@@ -29,12 +29,12 @@ async function run(): Promise<void> {
             required: true
         });
 
-        try {
         const cacheKey = await cache.restoreCache(
             cachePaths,
             primaryKey,
             restoreKeys
         );
+
         if (!cacheKey) {
             core.info(
                 `Cache not found for input keys: ${[
@@ -42,6 +42,7 @@ async function run(): Promise<void> {
                 ...restoreKeys
             ].join(", ")}`
         );
+
         return;
     }
 
@@ -50,17 +51,7 @@ async function run(): Promise<void> {
 
         const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
         utils.setCacheHitOutput(isExactKeyMatch);
 
         core.info(`Cache restored from key: ${cacheKey}`);
-        } catch (error: unknown) {
-            const typedError = error as Error;
-            if (typedError.name === cache.ValidationError.name) {
-                throw error;
-            } else {
-                utils.logWarning(typedError.message);
-                utils.setCacheHitOutput(false);
-            }
-        }
     } catch (error: unknown) {
         core.setFailed((error as Error).message);
     }
```
src/save.ts (14 changes)

```diff
@@ -44,20 +44,12 @@ async function run(): Promise<void> {
             required: true
         });
 
-        try {
-            await cache.saveCache(cachePaths, primaryKey, {
+        const cacheId = await cache.saveCache(cachePaths, primaryKey, {
             uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
         });
+
+        if (cacheId != -1) {
             core.info(`Cache saved with key: ${primaryKey}`);
-        } catch (error: unknown) {
-            const typedError = error as Error;
-            if (typedError.name === cache.ValidationError.name) {
-                throw error;
-            } else if (typedError.name === cache.ReserveCacheError.name) {
-                core.info(typedError.message);
-            } else {
-                utils.logWarning(typedError.message);
-            }
         }
     } catch (error: unknown) {
         utils.logWarning((error as Error).message);
```
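Taken together, the src changes show the error-handling hand-off to the toolkit: `restoreCache` now resolves to `undefined` and `saveCache` to `-1` on handled failures instead of throwing, so the action shrinks to a plain call plus a sentinel check. A condensed sketch of the resulting save flow (assuming the `@actions/cache` and `@actions/core` APIs used in the diffs above):

```typescript
import * as cache from "@actions/cache";
import * as core from "@actions/core";

// After actions/cache#834 the toolkit logs or swallows non-fatal cache
// errors itself and returns -1, so the action no longer needs its own
// try/catch around saveCache.
async function save(paths: string[], key: string): Promise<void> {
    const cacheId = await cache.saveCache(paths, key);
    if (cacheId !== -1) {
        core.info(`Cache saved with key: ${key}`);
    }
}
```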