Compare commits

..

9 Commits

Author SHA1 Message Date
20817ef617 dist 2022-07-07 07:11:11 +00:00
103570a2bf Merge branch 'main' of https://github.com/actions/cache into tiwarishub/cache-3-0 2022-07-06 07:32:11 +00:00
aeb01573e6 dist 2022-06-30 09:36:52 +00:00
d351e68b9a review comment 2022-06-30 06:36:53 +00:00
3d236ac88e dist 2022-06-25 20:05:15 +00:00
b8ddf3df10 resolve package 2022-06-25 20:00:53 +00:00
0c5d98e6bb updated package version 2022-06-25 13:18:28 +00:00
7c59aeb02d formatting and error 2022-06-24 05:04:57 +00:00
c75dca6de7 Consuming 3.0 actions/cache 2022-06-24 04:06:33 +00:00
8 changed files with 37 additions and 73 deletions

View File

@@ -6,7 +6,6 @@ addAssignees: false
# A list of reviewers to be added to pull requests (GitHub user name)
reviewers:
- phantsure
- kotewar
- aparna-ravindra
- tiwarishub

View File

@@ -11,5 +11,5 @@ jobs:
- name: 'Auto-assign issue'
uses: pozil/auto-assign-issue@v1.4.0
with:
assignees: phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
assignees: kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
numOfAssignee: 1

View File

@@ -15,6 +15,3 @@
### 3.0.4
- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
### 3.0.5
- Removed error handling by consuming actions/cache 3.0 toolkit, Now cache server error handling will be done by toolkit. ([PR](https://github.com/actions/cache/pull/834))

Binary file not shown.

40
dist/restore/index.js vendored
View File

@@ -1113,13 +1113,7 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
paths.push(`${relativeFile}`);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@@ -5470,7 +5464,6 @@ const buffer = __importStar(__webpack_require__(293));
const fs = __importStar(__webpack_require__(747));
const stream = __importStar(__webpack_require__(794));
const util = __importStar(__webpack_require__(669));
const timer = __importStar(__webpack_require__(581));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const requestUtils_1 = __webpack_require__(899);
@@ -5661,14 +5654,10 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield Promise.race([client.downloadToBuffer(segmentStart, segmentSize, {
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
}),
timer.setTimeout(60 * 60 * 1000, 'timeout')]);
if (result === 'timeout') {
throw new Error("Segment download timed out");
}
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
});
fs.writeFileSync(fd, result);
}
}
@@ -37283,9 +37272,9 @@ function extractTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'unzstd --long=30'];
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'unzstd'];
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
@@ -37316,9 +37305,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstdmt --long=30'];
return ['--use-compress-program', 'zstd -T0 --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstdmt'];
return ['--use-compress-program', 'zstd -T0'];
default:
return ['-z'];
}
@@ -37349,9 +37338,9 @@ function listTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'unzstd --long=30'];
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'unzstd'];
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
@@ -42354,12 +42343,7 @@ function clean(key)
/* 578 */,
/* 579 */,
/* 580 */,
/* 581 */
/***/ (function(module) {
module.exports = require("timers/promises");
/***/ }),
/* 581 */,
/* 582 */
/***/ (function(module) {

40
dist/save/index.js vendored
View File

@@ -1113,13 +1113,7 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
if (relativeFile === '') {
// path.relative returns empty string if workspace and file are equal
paths.push('.');
}
else {
paths.push(`${relativeFile}`);
}
paths.push(`${relativeFile}`);
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@@ -5470,7 +5464,6 @@ const buffer = __importStar(__webpack_require__(293));
const fs = __importStar(__webpack_require__(747));
const stream = __importStar(__webpack_require__(794));
const util = __importStar(__webpack_require__(669));
const timer = __importStar(__webpack_require__(581));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const requestUtils_1 = __webpack_require__(899);
@@ -5661,14 +5654,10 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
const result = yield Promise.race([client.downloadToBuffer(segmentStart, segmentSize, {
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
}),
timer.setTimeout(60 * 60 * 1000, 'timeout')]);
if (result === 'timeout') {
throw new Error("Segment download timed out");
}
const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
concurrency: options.downloadConcurrency,
onProgress: downloadProgress.onProgress()
});
fs.writeFileSync(fd, result);
}
}
@@ -37283,9 +37272,9 @@ function extractTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'unzstd --long=30'];
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'unzstd'];
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
@@ -37316,9 +37305,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'zstdmt --long=30'];
return ['--use-compress-program', 'zstd -T0 --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'zstdmt'];
return ['--use-compress-program', 'zstd -T0'];
default:
return ['-z'];
}
@@ -37349,9 +37338,9 @@ function listTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
return ['--use-compress-program', 'unzstd --long=30'];
return ['--use-compress-program', 'zstd -d --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
return ['--use-compress-program', 'unzstd'];
return ['--use-compress-program', 'zstd -d'];
default:
return ['-z'];
}
@@ -42354,12 +42343,7 @@ function clean(key)
/* 578 */,
/* 579 */,
/* 580 */,
/* 581 */
/***/ (function(module) {
module.exports = require("timers/promises");
/***/ }),
/* 581 */,
/* 582 */
/***/ (function(module) {

18
package-lock.json generated
View File

@@ -1,15 +1,15 @@
{
"name": "cache",
"version": "3.0.5",
"version": "3.0.4",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "cache",
"version": "3.0.5",
"version": "3.0.4",
"license": "MIT",
"dependencies": {
"@actions/cache": "file:actions-cache-3.0.1.tgz",
"@actions/cache": "^3.0.0",
"@actions/core": "^1.7.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"
@@ -36,10 +36,9 @@
}
},
"node_modules/@actions/cache": {
"version": "3.0.1",
"resolved": "file:actions-cache-3.0.1.tgz",
"integrity": "sha512-ucvw0xvFpe0/vfNQ/rc11ste0nidCdBAJ5j5F01BxBqjxmGH2doVzfPlqSIGhcN7wKI074x2ATb9+7HSrTqGHg==",
"license": "MIT",
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@@ -9534,8 +9533,9 @@
},
"dependencies": {
"@actions/cache": {
"version": "file:actions-cache-3.0.1.tgz",
"integrity": "sha512-ucvw0xvFpe0/vfNQ/rc11ste0nidCdBAJ5j5F01BxBqjxmGH2doVzfPlqSIGhcN7wKI074x2ATb9+7HSrTqGHg==",
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",

View File

@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "3.0.5",
"version": "3.0.4",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "file:actions-cache-3.0.1.tgz",
"@actions/cache": "^3.0.0",
"@actions/core": "^1.7.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"