Mirror of https://github.com/actions/cache.git (synced 2025-06-26 20:31:15 +02:00)

Compare commits: v3.0.4 ... bishal-pdM (28 commits)
Commit SHAs (author and date columns were not captured in this mirror):

8a9a57869f, fb1d96e3ff, 23d218be27, a7c34adf76, 83394c99b7, e839c25979,
33a923d660, a404368986, f4278025ab, 9916fe1701, 318935ef66, 85efbb58b9,
4387dbc81a, 71e3ee5cce, c316eb7911, 0865c47f36, 354a2ae15e, baed3516c3,
8829e97be1, eec8cd3f5f, 5cc84c0123, a0e530f115, afc669e7fc, a0efc56c52,
d25c51bbfd, a080a3bda4, 02be3a9c73, 2086306d9c
.github/auto_assign.yml (vendored, 1 addition)

@@ -7,6 +7,7 @@ addAssignees: false
 # A list of reviewers to be added to pull requests (GitHub user name)
 reviewers:
   - phantsure
+  - kotewar
   - aparna-ravindra
   - tiwarishub
   - vsvipul
.github/workflows/auto-assign-issues.yml (vendored, 2 changes)

@@ -11,5 +11,5 @@ jobs:
       - name: 'Auto-assign issue'
         uses: pozil/auto-assign-issue@v1.4.0
         with:
-          assignees: phantsure,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
+          assignees: phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
           numOfAssignee: 1
.github/workflows/auto-assign.yml (vendored, 2 changes)

@@ -1,6 +1,6 @@
 name: 'Auto Assign'
 on:
-  pull_request:
+  pull_request_target:
     types: [opened, ready_for_review]

 jobs:
.licenses/npm/@actions/cache.dep.yml (generated, 2 changes)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/cache"
-version: 2.0.6
+version: 3.0.3
 type: npm
 summary:
 homepage:
README.md (73 changes)

@@ -15,6 +15,9 @@ See ["Caching dependencies to speed up workflows"](https://help.github.com/githu
 * Updated the minimum runner version support from node 12 -> node 16.
 * Fixed avoiding empty cache save when no files are available for caching.
 * Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`.
+* Fixed zstd failing on amazon linux 2.0 runners
+* Fixed cache not working with github workspace directory or current directory
+* Fixed the download stuck problem by introducing a timeout of 1 hour for cache downloads.

 Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions

@@ -72,6 +75,8 @@ jobs:
       run: /primes.sh -d prime-numbers
 ```

+> Note: You must use the `cache` action in your workflow before you need to use the files that might be restored from the cache. If the provided `key` doesn't match an existing cache, a new cache is automatically created if the job completes successfully.
+
 ## Implementation Examples

 Every programming language and framework has its own way of caching.
@@ -79,6 +84,7 @@ Every programming language and framework has its own way of caching.
 See [Examples](examples.md) for a list of `actions/cache` implementations for use with:

 - [C# - NuGet](./examples.md#c---nuget)
+- [Clojure - Lein Deps](./examples.md#clojure---lein-deps)
 - [D - DUB](./examples.md#d---dub)
 - [Deno](./examples.md#deno)
 - [Elixir - Mix](./examples.md#elixir---mix)
@@ -172,33 +178,33 @@ jobs:
(the - and + sides of this hunk are textually identical in this mirror; the underlying change appears to be indentation-only, which the extraction did not preserve)
   build-linux:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3

       - name: Cache Primes
         id: cache-primes
         uses: actions/cache@v3
         with:
           path: prime-numbers
           key: primes

       - name: Generate Prime Numbers
         if: steps.cache-primes.outputs.cache-hit != 'true'
         run: ./generate-primes.sh -d prime-numbers

       - name: Cache Numbers
         id: cache-numbers
         uses: actions/cache@v3
         with:
           path: numbers
           key: primes

       - name: Generate Numbers
         if: steps.cache-numbers.outputs.cache-hit != 'true'
         run: ./generate-primes.sh -d numbers

   build-windows:
     runs-on: windows-latest
     steps:
       - uses: actions/checkout@v3

       - name: Cache Primes
@@ -213,6 +219,25 @@ jobs:
       run: ./generate-primes -d prime-numbers
 ```

+## Known practices and workarounds
+
+Following are some of the known practices/workarounds which community has used to fulfill specific requirements. You may choose to use them if suits your use case. Note these are not necessarily the only or the recommended solution.
+
+#### Update a cache
+
+A cache today is immutable and cannot be updated. But some use cases require the cache to be saved even though there was a "hit" during restore. To do so, use a `key` which is unique for every run and use `restore-keys` to restore the nearest cache. For example:
+
+```
+  - name: update cache on every commit
+    uses: actions/cache@v3
+    with:
+      path: prime-numbers
+      key: primes-${{ runner.os }}-${{ github.run_id }} # Can use time based key as well
+      restore-keys: |
+        primes-${{ runner.os }}
+```
+
+Please note that this will create a new cache on every run and hence will consume the cache [quota](#cache-limits).
+
+#### Use cache across feature branches
+
+Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches.
+
 ## Contributing

 We would love for you to contribute to `actions/cache`, pull requests are welcome! Please see the [CONTRIBUTING.md](CONTRIBUTING.md) for more information.
RELEASES.md (12 additions)

@@ -14,4 +14,14 @@
 - Fixed avoiding empty cache save when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))

 ### 3.0.4
 - Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
+
+### 3.0.5
+- Removed error handling by consuming actions/cache 3.0 toolkit, Now cache server error handling will be done by toolkit. ([PR](https://github.com/actions/cache/pull/834))
+
+### 3.0.6
+- Fixed [#809](https://github.com/actions/cache/issues/809) - zstd -d: no such file or directory error
+- Fixed [#833](https://github.com/actions/cache/issues/833) - cache doesn't work with github workspace directory
+
+### 3.0.7
+- Fixed [#810](https://github.com/actions/cache/issues/810) - download stuck issue. A new timeout is introduced in the download process to abort the download if it gets stuck and doesn't finish within an hour.
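The 3.0.7 entry above describes the mechanism only in prose. Here is a rough modern-TypeScript sketch of that race-a-promise-against-a-timer pattern (names are mine; the compiled implementation, promiseWithTimeout, appears verbatim in the dist/ diffs further down):

```typescript
// Race a promise against a timer and report 'timeout' if the timer wins.
async function promiseWithTimeout<T>(
    timeoutMs: number,
    promise: Promise<T>
): Promise<T | "timeout"> {
    let timeoutHandle: NodeJS.Timeout | undefined;
    const timeoutPromise = new Promise<"timeout">(resolve => {
        timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs);
    });
    const result = await Promise.race([promise, timeoutPromise]);
    clearTimeout(timeoutHandle); // always clear, whichever promise won
    return result;
}

// Usage in the spirit of the fix: give a segment download one hour.
// if (await promiseWithTimeout(3600000, download()) === "timeout") { abort(); }
```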
(The next two hunks arrived without file headers in this mirror; by their content they belong to the restore and save test suites, __tests__/restore.test.ts and __tests__/save.test.ts.)

@@ -227,40 +227,6 @@ test("restore with no cache found", async () => {
     );
 });

-test("restore with server error should fail", async () => {
-    const path = "node_modules";
-    const key = "node-test";
-    testUtils.setInputs({
-        path: path,
-        key
-    });
-
-    const logWarningMock = jest.spyOn(actionUtils, "logWarning");
-    const failedMock = jest.spyOn(core, "setFailed");
-    const stateMock = jest.spyOn(core, "saveState");
-    const restoreCacheMock = jest
-        .spyOn(cache, "restoreCache")
-        .mockImplementationOnce(() => {
-            throw new Error("HTTP Error Occurred");
-        });
-    const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-
-    await run();
-
-    expect(restoreCacheMock).toHaveBeenCalledTimes(1);
-    expect(restoreCacheMock).toHaveBeenCalledWith([path], key, []);
-
-    expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-
-    expect(logWarningMock).toHaveBeenCalledTimes(1);
-    expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
-
-    expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
-    expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
-
-    expect(failedMock).toHaveBeenCalledTimes(0);
-});
-
 test("restore with restore keys and no cache found", async () => {
     const path = "node_modules";
     const key = "node-test";

@@ -267,7 +267,6 @@ test("save with large cache outputs warning", async () => {
 });

 test("save with reserve cache failure outputs warning", async () => {
-    const infoMock = jest.spyOn(core, "info");
     const logWarningMock = jest.spyOn(actionUtils, "logWarning");
     const failedMock = jest.spyOn(core, "setFailed");

@@ -306,10 +305,10 @@ test("save with reserve cache failure outputs warning", async () => {
         expect.anything()
     );

-    expect(infoMock).toHaveBeenCalledWith(
+    expect(logWarningMock).toHaveBeenCalledWith(
         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
     );
-    expect(logWarningMock).toHaveBeenCalledTimes(0);
+    expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(failedMock).toHaveBeenCalledTimes(0);
 });
dist/restore/index.js (vendored, 137 changes)

@@ -1113,7 +1113,13 @@ function resolvePaths(patterns) {
                 .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
             core.debug(`Matched: ${relativeFile}`);
             // Paths are made relative so the tar entries are all relative to the root of the workspace.
-            paths.push(`${relativeFile}`);
+            if (relativeFile === '') {
+                // path.relative returns empty string if workspace and file are equal
+                paths.push('.');
+            }
+            else {
+                paths.push(`${relativeFile}`);
+            }
         }
     }
     catch (e_1_1) { e_1 = { error: e_1_1 }; }
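The `relativeFile === ''` branch above is the compiled form of the #833 fix: when the path being cached is the workspace directory itself, `path.relative` returns an empty string, which produced an invalid tar entry. A tiny standalone illustration (the directory value is hypothetical):

```typescript
import * as path from "path";

// path.relative yields '' when both arguments resolve to the same directory.
const workspace = "/home/runner/work/repo/repo"; // hypothetical workspace
const file = workspace;                          // caching the workspace itself

const relativeFile = path.relative(workspace, file); // ''
const tarEntry = relativeFile === "" ? "." : relativeFile;
console.log(tarEntry); // '.' archives the directory itself instead of nothing
```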
@@ -5467,6 +5473,7 @@ const util = __importStar(__webpack_require__(669));
 const utils = __importStar(__webpack_require__(15));
 const constants_1 = __webpack_require__(931);
 const requestUtils_1 = __webpack_require__(899);
+const abort_controller_1 = __webpack_require__(106);
 /**
  * Pipes the body of a HTTP response to a stream
  *
@@ -5650,15 +5657,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
         const fd = fs.openSync(archivePath, 'w');
         try {
             downloadProgress.startDisplayTimer();
+            const controller = new abort_controller_1.AbortController();
+            const abortSignal = controller.signal;
             while (!downloadProgress.isDone()) {
                 const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
                 const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
                 downloadProgress.nextSegment(segmentSize);
-                const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
+                const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
+                    abortSignal,
                     concurrency: options.downloadConcurrency,
                     onProgress: downloadProgress.onProgress()
-                });
-                fs.writeFileSync(fd, result);
+                }));
+                if (result === 'timeout') {
+                    controller.abort();
+                    throw new Error('Aborting cache download as the download time exceeded the timeout.');
+                }
+                else if (Buffer.isBuffer(result)) {
+                    fs.writeFileSync(fd, result);
+                }
             }
         }
         finally {
@@ -5669,6 +5685,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
     });
 }
 exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
+const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
+    let timeoutHandle;
+    const timeoutPromise = new Promise(resolve => {
+        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
+    });
+    return Promise.race([promise, timeoutPromise]).then(result => {
+        clearTimeout(timeoutHandle);
+        return result;
+    });
+});
 //# sourceMappingURL=downloadUtils.js.map

 /***/ }),
@@ -37272,9 +37298,9 @@ function extractTar(archivePath, compressionMethod) {
     function getCompressionProgram() {
         switch (compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
-                return ['--use-compress-program', 'zstd -d --long=30'];
+                return ['--use-compress-program', 'unzstd --long=30'];
             case constants_1.CompressionMethod.ZstdWithoutLong:
-                return ['--use-compress-program', 'zstd -d'];
+                return ['--use-compress-program', 'unzstd'];
             default:
                 return ['-z'];
         }
@@ -37305,9 +37331,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     function getCompressionProgram() {
         switch (compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
-                return ['--use-compress-program', 'zstd -T0 --long=30'];
+                return ['--use-compress-program', 'zstdmt --long=30'];
             case constants_1.CompressionMethod.ZstdWithoutLong:
-                return ['--use-compress-program', 'zstd -T0'];
+                return ['--use-compress-program', 'zstdmt'];
             default:
                 return ['-z'];
         }
@@ -37338,9 +37364,9 @@ function listTar(archivePath, compressionMethod) {
     function getCompressionProgram() {
         switch (compressionMethod) {
             case constants_1.CompressionMethod.Zstd:
-                return ['--use-compress-program', 'zstd -d --long=30'];
+                return ['--use-compress-program', 'unzstd --long=30'];
             case constants_1.CompressionMethod.ZstdWithoutLong:
-                return ['--use-compress-program', 'zstd -d'];
+                return ['--use-compress-program', 'unzstd'];
             default:
                 return ['-z'];
         }
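These three hunks are the compiled form of the #809 fix. A short sketch of the substitution; the alias facts are standard zstd behavior, while the reading of the failure mode is mine, not text from the diff:

```typescript
// unzstd and zstdmt are aliases that ship with zstd:
//   unzstd is an alias for 'zstd -d'  (decompress)
//   zstdmt is an alias for 'zstd -T0' (multi-threaded compress)
// Moving the flag into the program name avoids tar builds that treat the
// whole --use-compress-program value as a single executable path, which
// matches the "zstd -d: no such file or directory" error reported in #809.
function compressionArgs(mode: "extract" | "create", longWindow: boolean): string[] {
    const program = mode === "extract" ? "unzstd" : "zstdmt";
    return ["--use-compress-program", longWindow ? `${program} --long=30` : program];
}

console.log(compressionArgs("extract", true)); // ['--use-compress-program', 'unzstd --long=30']
```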
@@ -40789,7 +40815,8 @@ function getDownloadOptions(copy) {
     const result = {
         useAzureSdk: true,
         downloadConcurrency: 8,
-        timeoutInMs: 30000
+        timeoutInMs: 30000,
+        segmentTimeoutInMs: 3600000
     };
     if (copy) {
         if (typeof copy.useAzureSdk === 'boolean') {
@@ -40801,10 +40828,14 @@ function getDownloadOptions(copy) {
         if (typeof copy.timeoutInMs === 'number') {
             result.timeoutInMs = copy.timeoutInMs;
         }
+        if (typeof copy.segmentTimeoutInMs === 'number') {
+            result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
+        }
     }
     core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
     core.debug(`Download concurrency: ${result.downloadConcurrency}`);
     core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
+    core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
     return result;
 }
 exports.getDownloadOptions = getDownloadOptions;
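`segmentTimeoutInMs` defaults to 3600000 ms, the one-hour timeout from the release notes, and can be overridden through the download options. A hedged usage sketch (the five-minute value is hypothetical; the action itself does not expose this as a workflow input in this diff):

```typescript
import * as cache from "@actions/cache";

async function restoreWithShortSegmentTimeout(): Promise<string | undefined> {
    // restoreCache forwards these options to getDownloadOptions(), which
    // merges them over the defaults shown in the hunk above.
    return cache.restoreCache(["node_modules"], "node-test", [], {
        segmentTimeoutInMs: 5 * 60 * 1000 // hypothetical 5-minute override
    });
}
```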
@@ -46850,17 +46881,18 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
             checkKey(key);
         }
         const compressionMethod = yield utils.getCompressionMethod();
-        // path are needed to compute version
-        const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
-            compressionMethod
-        });
-        if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
-            // Cache not found
-            return undefined;
-        }
-        const archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
-        core.debug(`Archive Path: ${archivePath}`);
+        let archivePath = '';
         try {
+            // path are needed to compute version
+            const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, {
+                compressionMethod
+            });
+            if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) {
+                // Cache not found
+                return undefined;
+            }
+            archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod));
+            core.debug(`Archive Path: ${archivePath}`);
             // Download the cache from the cache entry
             yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options);
             if (core.isDebug()) {
@@ -46870,6 +46902,17 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
             core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
             yield tar_1.extractTar(archivePath, compressionMethod);
             core.info('Cache restored successfully');
+            return cacheEntry.cacheKey;
+        }
+        catch (error) {
+            const typedError = error;
+            if (typedError.name === ValidationError.name) {
+                throw error;
+            }
+            else {
+                // Supress all non-validation cache related errors because caching should be optional
+                core.warning(`Failed to restore: ${error.message}`);
+            }
         }
         finally {
             // Try to delete the archive to save space
@@ -46880,7 +46923,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
                 core.debug(`Failed to delete archive: ${error}`);
             }
         }
-        return cacheEntry.cacheKey;
+        return undefined;
     });
 }
 exports.restoreCache = restoreCache;
@@ -46898,7 +46941,7 @@ function saveCache(paths, key, options) {
         checkPaths(paths);
         checkKey(key);
         const compressionMethod = yield utils.getCompressionMethod();
-        let cacheId = null;
+        let cacheId = -1;
         const cachePaths = yield utils.resolvePaths(paths);
         core.debug('Cache Paths:');
         core.debug(`${JSON.stringify(cachePaths)}`);
@@ -46937,6 +46980,18 @@ function saveCache(paths, key, options) {
             core.debug(`Saving Cache (ID: ${cacheId})`);
             yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         }
+        catch (error) {
+            const typedError = error;
+            if (typedError.name === ValidationError.name) {
+                throw error;
+            }
+            else if (typedError.name === ReserveCacheError.name) {
+                core.info(`Failed to save: ${typedError.message}`);
+            }
+            else {
+                core.warning(`Failed to save: ${typedError.message}`);
+            }
+        }
         finally {
             // Try to delete the archive to save space
             try {
@@ -48996,31 +49051,19 @@ function run() {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        try {
-            const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
-            if (!cacheKey) {
-                core.info(`Cache not found for input keys: ${[
-                    primaryKey,
-                    ...restoreKeys
-                ].join(", ")}`);
-                return;
-            }
-            // Store the matched cache key
-            utils.setCacheState(cacheKey);
-            const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
-            utils.setCacheHitOutput(isExactKeyMatch);
-            core.info(`Cache restored from key: ${cacheKey}`);
-        }
-        catch (error) {
-            const typedError = error;
-            if (typedError.name === cache.ValidationError.name) {
-                throw error;
-            }
-            else {
-                utils.logWarning(typedError.message);
-                utils.setCacheHitOutput(false);
-            }
-        }
+        const cacheKey = yield cache.restoreCache(cachePaths, primaryKey, restoreKeys);
+        if (!cacheKey) {
+            core.info(`Cache not found for input keys: ${[
+                primaryKey,
+                ...restoreKeys
+            ].join(", ")}`);
+            return;
+        }
+        // Store the matched cache key
+        utils.setCacheState(cacheKey);
+        const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
+        utils.setCacheHitOutput(isExactKeyMatch);
+        core.info(`Cache restored from key: ${cacheKey}`);
     }
     catch (error) {
         core.setFailed(error.message);
dist/save/index.js (vendored, 121 changes)

The compiled save bundle picks up the same @actions/cache toolkit changes shown above for dist/restore/index.js (the resolvePaths '.' handling, the abort-controller download timeout with the promiseWithTimeout helper, the unzstd/zstdmt compression programs, the segmentTimeoutInMs download option, and the restoreCache/saveCache error handling) as identical hunks at shifted offsets: @@ -5650,15 +5657,24 @@, @@ -37272,9 +37298,9 @@, @@ -40789,7 +40815,8 @@, @@ -46948,17 +46967,18 @@, @@ -46996,7 +47027,7 @@, and so on. The save-specific change is in the action's run():

@@ -46792,24 +46823,12 @@ function run() {
         const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, {
             required: true
         });
-        try {
-            yield cache.saveCache(cachePaths, primaryKey, {
-                uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
-            });
+        const cacheId = yield cache.saveCache(cachePaths, primaryKey, {
+            uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
+        });
+        if (cacheId != -1) {
             core.info(`Cache saved with key: ${primaryKey}`);
         }
-        catch (error) {
-            const typedError = error;
-            if (typedError.name === cache.ValidationError.name) {
-                throw error;
-            }
-            else if (typedError.name === cache.ReserveCacheError.name) {
-                core.info(typedError.message);
-            }
-            else {
-                utils.logWarning(typedError.message);
-            }
-        }
     }
     catch (error) {
         utils.logWarning(error.message);
examples.md (37 changes)

@@ -1,6 +1,7 @@
 # Examples

 - [C# - NuGet](#c---nuget)
+- [Clojure - Lein Deps](#clojure---lein-deps)
 - [D - DUB](#d---dub)
 - [POSIX](#posix)
 - [Windows](#windows)
@@ -80,6 +81,19 @@ steps:
       ${{ runner.os }}-nuget-
 ```

+## Clojure - Lein Deps
+
+```yaml
+- name: Cache lein project dependencies
+  uses: actions/cache@v3
+  with:
+    path: ~/.m2/repository
+    key: ${{ runner.os }}-clojure-${{ hashFiles('**/project.clj') }}
+    restore-keys: |
+      ${{ runner.os }}-clojure
+```
+
+
 ## D - DUB

 ### POSIX
@@ -223,6 +237,8 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba

 ## Haskell - Stack

+### Linux or macOS
+
 ```yaml
 - uses: actions/cache@v3
   name: Cache ~/.stack
@@ -240,6 +256,27 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
       ${{ runner.os }}-stack-work-
 ```
+
+### Windows
+
+```yaml
+- uses: actions/cache@v3
+  name: Cache %APPDATA%\stack %LOCALAPPDATA%\Programs\stack
+  with:
+    path: |
+      ~\AppData\Roaming\stack
+      ~\AppData\Local\Programs\stack
+    key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
+    restore-keys: |
+      ${{ runner.os }}-stack-global-
+- uses: actions/cache@v3
+  name: Cache .stack-work
+  with:
+    path: .stack-work
+    key: ${{ runner.os }}-stack-work-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}-${{ hashFiles('**/*.hs') }}
+    restore-keys: |
+      ${{ runner.os }}-stack-work-
+```
+
 ## Java - Gradle

 >Note: Ensure no Gradle daemons are running anymore when your workflow completes. Creating the cache package might fail due to locks being held by Gradle. Refer to the [Gradle Daemon documentation](https://docs.gradle.org/current/userguide/gradle_daemon.html) on how to disable or stop the Gradle Daemons.
package-lock.json (generated, 18 changes)

@@ -1,15 +1,15 @@
 {
   "name": "cache",
-  "version": "3.0.4",
+  "version": "3.0.7",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "cache",
-      "version": "3.0.4",
+      "version": "3.0.7",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "^2.0.6",
+        "@actions/cache": "^3.0.3",
         "@actions/core": "^1.7.0",
         "@actions/exec": "^1.1.1",
         "@actions/io": "^1.1.2"
@@ -36,9 +36,9 @@
     }
   },
   "node_modules/@actions/cache": {
-    "version": "2.0.6",
-    "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.6.tgz",
-    "integrity": "sha512-Z39ZrWaTRRPaV/AOQdY7hve+Iy/HloH5prpz+k+0lZgGQs/3SeO0UYSIakVuXOk2pdMZnl0Nv0PoK1rmh9YfGQ==",
+    "version": "3.0.3",
+    "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.3.tgz",
+    "integrity": "sha512-kn0pZRQNFRg1IQnW/N7uTNbbLqYalvQW2bmrznn3C34LMY/rSuEmH6Uo69HDh335Q0vKs9kg/jsIarzUBKzEXg==",
     "dependencies": {
       "@actions/core": "^1.2.6",
       "@actions/exec": "^1.0.1",
@@ -9533,9 +9533,9 @@
   },
   "dependencies": {
     "@actions/cache": {
-      "version": "2.0.6",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-2.0.6.tgz",
-      "integrity": "sha512-Z39ZrWaTRRPaV/AOQdY7hve+Iy/HloH5prpz+k+0lZgGQs/3SeO0UYSIakVuXOk2pdMZnl0Nv0PoK1rmh9YfGQ==",
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.3.tgz",
+      "integrity": "sha512-kn0pZRQNFRg1IQnW/N7uTNbbLqYalvQW2bmrznn3C34LMY/rSuEmH6Uo69HDh335Q0vKs9kg/jsIarzUBKzEXg==",
       "requires": {
         "@actions/core": "^1.2.6",
         "@actions/exec": "^1.0.1",
package.json (file header not captured in this mirror)

@@ -1,6 +1,6 @@
 {
   "name": "cache",
-  "version": "3.0.4",
+  "version": "3.0.7",
   "private": true,
   "description": "Cache dependencies and build outputs",
   "main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^2.0.6",
+    "@actions/cache": "^3.0.3",
     "@actions/core": "^1.7.0",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.2"
src/restore.ts (file header not captured in this mirror)

@@ -29,38 +29,29 @@ async function run(): Promise<void> {
             required: true
         });

-        try {
-            const cacheKey = await cache.restoreCache(
-                cachePaths,
-                primaryKey,
-                restoreKeys
-            );
-            if (!cacheKey) {
-                core.info(
-                    `Cache not found for input keys: ${[
-                        primaryKey,
-                        ...restoreKeys
-                    ].join(", ")}`
-                );
-                return;
-            }
-
-            // Store the matched cache key
-            utils.setCacheState(cacheKey);
-
-            const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
-            utils.setCacheHitOutput(isExactKeyMatch);
-
-            core.info(`Cache restored from key: ${cacheKey}`);
-        } catch (error: unknown) {
-            const typedError = error as Error;
-            if (typedError.name === cache.ValidationError.name) {
-                throw error;
-            } else {
-                utils.logWarning(typedError.message);
-                utils.setCacheHitOutput(false);
-            }
-        }
+        const cacheKey = await cache.restoreCache(
+            cachePaths,
+            primaryKey,
+            restoreKeys
+        );
+
+        if (!cacheKey) {
+            core.info(
+                `Cache not found for input keys: ${[
+                    primaryKey,
+                    ...restoreKeys
+                ].join(", ")}`
+            );
+
+            return;
+        }
+
+        // Store the matched cache key
+        utils.setCacheState(cacheKey);
+
+        const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheKey);
+        utils.setCacheHitOutput(isExactKeyMatch);
+
+        core.info(`Cache restored from key: ${cacheKey}`);
     } catch (error: unknown) {
         core.setFailed((error as Error).message);
     }
src/save.ts (18 changes)

@@ -44,20 +44,12 @@ async function run(): Promise<void> {
             required: true
         });

-        try {
-            await cache.saveCache(cachePaths, primaryKey, {
-                uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
-            });
+        const cacheId = await cache.saveCache(cachePaths, primaryKey, {
+            uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
+        });
+
+        if (cacheId != -1) {
             core.info(`Cache saved with key: ${primaryKey}`);
-        } catch (error: unknown) {
-            const typedError = error as Error;
-            if (typedError.name === cache.ValidationError.name) {
-                throw error;
-            } else if (typedError.name === cache.ReserveCacheError.name) {
-                core.info(typedError.message);
-            } else {
-                utils.logWarning(typedError.message);
-            }
         }
     } catch (error: unknown) {
         utils.logWarning((error as Error).message);
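Read together with the dist/ changes above, the simplified src/restore.ts and src/save.ts rely on a new calling contract in the @actions/cache toolkit. A hedged sketch of that contract as this diff implies it (not the toolkit's documented API):

```typescript
import * as cache from "@actions/cache";

// With @actions/cache >= 3.0.3 the action drops its own try/catch:
async function demo(paths: string[], key: string): Promise<void> {
    // restoreCache resolves to undefined both on a miss and when a
    // non-validation error was suppressed inside the toolkit.
    const restoredKey: string | undefined = await cache.restoreCache(paths, key);

    // saveCache resolves to -1 when the save was skipped (for example when
    // the cache could not be reserved) instead of throwing ReserveCacheError;
    // only ValidationError still propagates to the caller.
    const cacheId: number = await cache.saveCache(paths, key);

    if (cacheId !== -1) {
        console.log(`Cache saved with key: ${key}`);
    } else if (restoredKey === undefined) {
        console.log("nothing restored, nothing saved");
    }
}
```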