Mirror of https://github.com/actions/upload-artifact.git, synced 2025-06-15 16:27:11 +02:00.

Compare commits (22 commits)
Commits in this comparison:

50769540e7, d52396ac5d, 710f362075, 3b315f26f6, 3be2180eb7, 453e8d0a40, 0a398c1480, a0c40cf602, acb59e4776, cb6558bb10, 834a144ee9, 134dcf33c0, 73a0b9c954, 89ef406dd8, 23d796df36, e445c64bc2, 0b2256b8c0, 488dcefb9b, 04c51f5766, 32a9e276a8, 552bf3722c, 79616d2ded
.licenses/npm/@actions/artifact.dep.yml (generated, 2 changed lines)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/artifact"
-version: 2.1.6
+version: 2.1.8
 type: npm
 summary:
 homepage:
.licenses/npm/@actions/glob.dep.yml (generated, 2 changed lines)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/glob"
-version: 0.3.0
+version: 0.5.0
 type: npm
 summary:
 homepage:
@@ -64,6 +64,7 @@ There is also a new sub-action, `actions/upload-artifact/merge`. For more info,
 Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once. Otherwise you _will_ encounter an error.
 
 3. Limit of Artifacts for an individual job. Each job in a workflow run now has a limit of 500 artifacts.
+4. With `v4.4` and later, hidden files are excluded by default.
 
 For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
 
@@ -107,6 +108,12 @@ For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
 # Does not fail if the artifact does not exist.
 # Optional. Default is 'false'
 overwrite:
 
+# Whether to include hidden files in the provided path in the artifact
+# The file contents of any hidden files in the path should be validated before
+# enabled this to avoid uploading sensitive information.
+# Optional. Default is 'false'
+include-hidden-files:
 ```
 
 ### Outputs
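The new `include-hidden-files` input documented here is opt-in. A hedged sketch of how a consumer of the repository's search module might wire it up, mirroring the two-argument `findFilesToUpload(searchPath, includeHiddenFiles)` call exercised by the tests later in this comparison; the input-reading code itself is not part of this diff, so the wiring and the `./search` import path below are illustrative:

```typescript
import * as core from '@actions/core'
import {findFilesToUpload} from './search' // path is illustrative

async function collectUploadSet(searchPath: string) {
  // 'include-hidden-files' defaults to 'false' per the action.yml hunk in this comparison.
  const includeHiddenFiles = core.getBooleanInput('include-hidden-files')
  const searchResult = await findFilesToUpload(searchPath, includeHiddenFiles)
  core.info(`Found ${searchResult.filesToUpload.length} file(s) to upload`)
  return searchResult
}
```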
@@ -61,6 +61,20 @@ const lonelyFilePath = path.join(
   'lonely-file.txt'
 )
 
+const hiddenFile = path.join(root, '.hidden-file.txt')
+const fileInHiddenFolderPath = path.join(
+  root,
+  '.hidden-folder',
+  'folder-in-hidden-folder',
+  'file.txt'
+)
+const fileInHiddenFolderInFolderA = path.join(
+  root,
+  'folder-a',
+  '.hidden-folder-in-folder-a',
+  'file.txt'
+)
+
 describe('Search', () => {
   beforeAll(async () => {
     // mock all output so that there is less noise when running tests
@@ -93,6 +107,14 @@ describe('Search', () => {
       recursive: true
     })
 
+    await fs.mkdir(
+      path.join(root, '.hidden-folder', 'folder-in-hidden-folder'),
+      {recursive: true}
+    )
+    await fs.mkdir(path.join(root, 'folder-a', '.hidden-folder-in-folder-a'), {
+      recursive: true
+    })
+
     await fs.writeFile(searchItem1Path, 'search item1 file')
     await fs.writeFile(searchItem2Path, 'search item2 file')
     await fs.writeFile(searchItem3Path, 'search item3 file')
@@ -110,10 +132,19 @@ describe('Search', () => {
     await fs.writeFile(amazingFileInFolderHPath, 'amazing file')
 
     await fs.writeFile(lonelyFilePath, 'all by itself')
+
+    await fs.writeFile(hiddenFile, 'hidden file')
+    await fs.writeFile(fileInHiddenFolderPath, 'file in hidden directory')
+    await fs.writeFile(fileInHiddenFolderInFolderA, 'file in hidden directory')
     /*
       Directory structure of files that get created:
       root/
+        .hidden-folder/
+          folder-in-hidden-folder/
+            file.txt
         folder-a/
+          .hidden-folder-in-folder-a/
+            file.txt
         folder-b/
         folder-c/
           search-item1.txt
@@ -136,6 +167,7 @@ describe('Search', () => {
         folder-j/
         folder-k/
           lonely-file.txt
+        .hidden-file.txt
         search-item5.txt
     */
   })
@@ -352,4 +384,24 @@ describe('Search', () => {
     )
     expect(searchResult.filesToUpload.includes(lonelyFilePath)).toEqual(true)
   })
+
+  it('Hidden files ignored by default', async () => {
+    const searchPath = path.join(root, '**/*')
+    const searchResult = await findFilesToUpload(searchPath)
+
+    expect(searchResult.filesToUpload).not.toContain(hiddenFile)
+    expect(searchResult.filesToUpload).not.toContain(fileInHiddenFolderPath)
+    expect(searchResult.filesToUpload).not.toContain(
+      fileInHiddenFolderInFolderA
+    )
+  })
+
+  it('Hidden files included', async () => {
+    const searchPath = path.join(root, '**/*')
+    const searchResult = await findFilesToUpload(searchPath, true)
+
+    expect(searchResult.filesToUpload).toContain(hiddenFile)
+    expect(searchResult.filesToUpload).toContain(fileInHiddenFolderPath)
+    expect(searchResult.filesToUpload).toContain(fileInHiddenFolderInFolderA)
+  })
 })
@@ -40,6 +40,11 @@ inputs:
       If false, the action will fail if an artifact for the given name already exists.
       Does not fail if the artifact does not exist.
     default: 'false'
+  include-hidden-files:
+    description: >
+      If true, hidden files will be included in the artifact.
+      If false, hidden files will be excluded from the artifact.
+    default: 'false'
 
 outputs:
   artifact-id:
dist/merge/index.js (vendored, 512 changed lines)
@@ -2328,9 +2328,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
-const stream = __importStar(__nccwpck_require__(12781));
-const fs_1 = __nccwpck_require__(57147);
-const path = __importStar(__nccwpck_require__(71017));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2371,9 +2368,6 @@ function streamExtract(url, directory) {
 return;
 }
 catch (error) {
-if (error.message.includes('Malformed extraction path')) {
-throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
-}
 retryCount++;
 core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
 // wait 5 seconds before retrying
@@ -2396,8 +2390,6 @@ function streamExtractExternal(url, directory) {
 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
 };
 const timer = setTimeout(timerFn, timeout);
-const createdDirectories = new Set();
-createdDirectories.add(directory);
 response.message
 .on('data', () => {
 timer.refresh();
@@ -2407,47 +2399,11 @@ function streamExtractExternal(url, directory) {
 clearTimeout(timer);
 reject(error);
 })
-.pipe(unzip_stream_1.default.Parse())
-.pipe(new stream.Transform({
-objectMode: true,
-transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
-const fullPath = path.normalize(path.join(directory, entry.path));
-if (!directory.endsWith(path.sep)) {
-directory += path.sep;
-}
-if (!fullPath.startsWith(directory)) {
-reject(new Error(`Malformed extraction path: ${fullPath}`));
-}
-if (entry.type === 'Directory') {
-if (!createdDirectories.has(fullPath)) {
-createdDirectories.add(fullPath);
-yield resolveOrCreateDirectory(fullPath).then(() => {
-entry.autodrain();
-callback();
-});
-}
-else {
-entry.autodrain();
-callback();
-}
-}
-else {
-core.info(`Extracting artifact entry: ${fullPath}`);
-if (!createdDirectories.has(path.dirname(fullPath))) {
-createdDirectories.add(path.dirname(fullPath));
-yield resolveOrCreateDirectory(path.dirname(fullPath));
-}
-const writeStream = (0, fs_1.createWriteStream)(fullPath);
-writeStream.on('finish', callback);
-writeStream.on('error', reject);
-entry.pipe(writeStream);
-}
-})
-}))
-.on('finish', () => __awaiter(this, void 0, void 0, function* () {
+.pipe(unzip_stream_1.default.Extract({ path: directory }))
+.on('close', () => {
 clearTimeout(timer);
 resolve();
-}))
+})
 .on('error', (error) => {
 reject(error);
 });
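The download path above now hands extraction to unzip-stream's `Extract({ path })` stream instead of driving `Parse()` through a hand-rolled `stream.Transform`, and resolves on the extractor's `'close'` event. A minimal sketch of that pattern in isolation, assuming only the unzip-stream API already used in the new code; the archive path and destination are placeholders:

```typescript
import {createReadStream} from 'node:fs'
import unzip from 'unzip-stream'

function extractZip(archivePath: string, destination: string): Promise<void> {
  return new Promise((resolve, reject) => {
    createReadStream(archivePath)
      // Extract creates directories and writes entries under `path` itself,
      // which is what replaced the manual entry-by-entry Transform above.
      .pipe(unzip.Extract({path: destination}))
      .on('close', () => resolve())
      .on('error', reject)
  })
}
```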
@@ -3069,8 +3025,9 @@ function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
 const hostname = ghUrl.hostname.trimEnd().toUpperCase();
 const isGitHubHost = hostname === 'GITHUB.COM';
-const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
-return !isGitHubHost && !isGheHost;
+const isGheHost = hostname.endsWith('.GHE.COM');
+const isLocalHost = hostname.endsWith('.LOCALHOST');
+return !isGitHubHost && !isGheHost && !isLocalHost;
 }
 exports.isGhes = isGhes;
 function getGitHubWorkspaceDir() {
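The `isGhes()` change above broadens the set of hostnames treated as GitHub-hosted: `.GHE.COM` and any `.LOCALHOST` suffix are now excluded separately rather than only `.GHE.LOCALHOST`. A minimal TypeScript restatement of that check, useful for reasoning about which `GITHUB_SERVER_URL` values count as GHES; the standalone helper name is illustrative, not part of the bundled module's public API:

```typescript
// Sketch of the host classification performed by the bundled isGhes() above.
// Returns true only for hostnames that are neither github.com, *.ghe.com, nor *.localhost.
function looksLikeGhes(serverUrl: string = process.env['GITHUB_SERVER_URL'] ?? 'https://github.com'): boolean {
  const hostname = new URL(serverUrl).hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')
  return !isGitHubHost && !isGheHost && !isLocalHost
}

// e.g. looksLikeGhes('https://github.com') === false
//      looksLikeGhes('https://my-org.ghe.com') === false
//      looksLikeGhes('https://github.example.internal') === true
```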
@@ -8924,16 +8881,18 @@ exports.create = create;
 * Computes the sha256 hash of a glob
 *
 * @param patterns Patterns separated by newlines
+* @param currentWorkspace Workspace used when matching files
 * @param options Glob options
+* @param verbose Enables verbose logging
 */
-function hashFiles(patterns, options, verbose = false) {
+function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
 return __awaiter(this, void 0, void 0, function* () {
 let followSymbolicLinks = true;
 if (options && typeof options.followSymbolicLinks === 'boolean') {
 followSymbolicLinks = options.followSymbolicLinks;
 }
 const globber = yield create(patterns, { followSymbolicLinks });
-return internal_hash_files_1.hashFiles(globber, verbose);
+return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose);
 });
 }
 exports.hashFiles = hashFiles;
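The bundled `@actions/glob` hashFiles now takes an explicit workspace: the second parameter overrides `GITHUB_WORKSPACE` when deciding which matched files are eligible for hashing. A short usage sketch against the published `@actions/glob` API with the signature shown above; the pattern and directory values are placeholders:

```typescript
import * as glob from '@actions/glob'

async function hashSources(): Promise<string> {
  // With the signature shown above, a workspace can be passed explicitly;
  // an empty string falls back to GITHUB_WORKSPACE (or process.cwd()).
  const digest = await glob.hashFiles('**/*.ts', '/path/to/checkout')
  return digest // sha256 hex string, or '' when nothing matched
}
```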
@@ -8948,7 +8907,11 @@ exports.hashFiles = hashFiles;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -8961,7 +8924,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -8976,7 +8939,8 @@ function getOptions(copy) {
 followSymbolicLinks: true,
 implicitDescendants: true,
 matchDirectories: true,
-omitBrokenSymbolicLinks: true
+omitBrokenSymbolicLinks: true,
+excludeHiddenFiles: false
 };
 if (copy) {
 if (typeof copy.followSymbolicLinks === 'boolean') {
@@ -8995,6 +8959,10 @@ function getOptions(copy) {
 result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
 core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
 }
+if (typeof copy.excludeHiddenFiles === 'boolean') {
+result.excludeHiddenFiles = copy.excludeHiddenFiles;
+core.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`);
+}
 }
 return result;
 }
@@ -9010,7 +8978,11 @@ exports.getOptions = getOptions; and @@ -9023,7 +8995,7 @@: the same __createBinding/__importStar helper updates as in the -8948/-8961 hunks above.
@@ -9077,19 +9049,21 @@ class DefaultGlobber {
 return this.searchPaths.slice();
 }
 glob() {
-var e_1, _a;
+var _a, e_1, _b, _c;
 return __awaiter(this, void 0, void 0, function* () {
 const result = [];
 try {
-for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
-const itemPath = _c.value;
+for (var _d = true, _e = __asyncValues(this.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
+_c = _f.value;
+_d = false;
+const itemPath = _c;
 result.push(itemPath);
 }
 }
 catch (e_1_1) { e_1 = { error: e_1_1 }; }
 finally {
 try {
-if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
 }
 finally { if (e_1) throw e_1.error; }
 }
@@ -9147,6 +9121,10 @@ class DefaultGlobber {
 if (!stats) {
 continue;
 }
+// Hidden file or directory?
+if (options.excludeHiddenFiles && path.basename(item.path).match(/^\./)) {
+continue;
+}
 // Directory
 if (stats.isDirectory()) {
 // Matched
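The bundled `@actions/glob` now skips any item whose basename starts with a dot when `excludeHiddenFiles` is set, and because hidden directories are skipped before traversal their contents are dropped too. A hedged sketch of driving this directly through the public `@actions/glob` API; the option name comes from the bundled code above, and the pattern is a placeholder:

```typescript
import * as glob from '@actions/glob'

async function listVisibleFiles(pattern: string): Promise<string[]> {
  // excludeHiddenFiles mirrors the option handled by getOptions() above;
  // dotfiles and dot-directories (including their contents) are skipped during the walk.
  const globber = await glob.create(pattern, {excludeHiddenFiles: true})
  return globber.glob()
}

// listVisibleFiles('**/*') would return search-item5.txt but not .hidden-file.txt
// in the test tree shown earlier in this comparison.
```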
@@ -9252,7 +9230,11 @@ exports.DefaultGlobber = DefaultGlobber; and @@ -9265,7 +9247,7 @@: the same __createBinding/__importStar helper updates as in the -8948/-8961 hunks above.
@@ -9293,18 +9275,22 @@ const fs = __importStar(__nccwpck_require__(57147));
 const stream = __importStar(__nccwpck_require__(12781));
 const util = __importStar(__nccwpck_require__(73837));
 const path = __importStar(__nccwpck_require__(71017));
-function hashFiles(globber, verbose = false) {
-var e_1, _a;
-var _b;
+function hashFiles(globber, currentWorkspace, verbose = false) {
+var _a, e_1, _b, _c;
+var _d;
 return __awaiter(this, void 0, void 0, function* () {
 const writeDelegate = verbose ? core.info : core.debug;
 let hasMatch = false;
-const githubWorkspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+const githubWorkspace = currentWorkspace
+? currentWorkspace
+: (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
 const result = crypto.createHash('sha256');
 let count = 0;
 try {
-for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-const file = _d.value;
+for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
+_c = _g.value;
+_e = false;
+const file = _c;
 writeDelegate(file);
 if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
 writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
@@ -9327,7 +9313,7 @@ function hashFiles(globber, verbose = false) {
 catch (e_1_1) { e_1 = { error: e_1_1 }; }
 finally {
 try {
-if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
 }
 finally { if (e_1) throw e_1.error; }
 }
@@ -9367,7 +9353,7 @@ var MatchKind;
 MatchKind[MatchKind["File"] = 2] = "File";
 /** Matched */
 MatchKind[MatchKind["All"] = 3] = "All";
-})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
+})(MatchKind || (exports.MatchKind = MatchKind = {}));
 //# sourceMappingURL=internal-match-kind.js.map
 
 /***/ }),
@@ -9379,7 +9365,11 @@ var MatchKind; and @@ -9392,7 +9382,7 @@: the same __createBinding/__importStar helper updates as in the -8948/-8961 hunks above.
@@ -9442,8 +9432,8 @@ exports.dirname = dirname;
 * or `C:` are expanded based on the current working directory.
 */
 function ensureAbsoluteRoot(root, itemPath) {
-assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
-assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
+(0, assert_1.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Already rooted
 if (hasAbsoluteRoot(itemPath)) {
 return itemPath;
@@ -9453,7 +9443,7 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like C: or C:foo
 if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
 let cwd = process.cwd();
-assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+(0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 // Drive letter matches cwd? Expand to cwd
 if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
 // Drive only, e.g. C:
@@ -9478,11 +9468,11 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like \ or \foo
 else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
 const cwd = process.cwd();
-assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+(0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 return `${cwd[0]}:\\${itemPath.substr(1)}`;
 }
 }
-assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
+(0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
 // Otherwise ensure root ends with a separator
 if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
 // Intentionally empty
@@ -9499,7 +9489,7 @@ exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
 * `\\hello\share` and `C:\hello` (and using alternate separator).
 */
 function hasAbsoluteRoot(itemPath) {
-assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -9516,7 +9506,7 @@ exports.hasAbsoluteRoot = hasAbsoluteRoot;
 * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
 */
 function hasRoot(itemPath) {
-assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -9584,7 +9574,11 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; and @@ -9597,7 +9591,7 @@: the same __createBinding/__importStar helper updates as in the -8948/-8961 hunks above.
@@ -9622,7 +9616,7 @@ class Path {
 this.segments = [];
 // String
 if (typeof itemPath === 'string') {
-assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`);
 // Normalize slashes and trim unnecessary trailing slash
 itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
 // Not rooted
@@ -9649,24 +9643,24 @@ class Path {
 // Array
 else {
 // Must not be empty
-assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+(0, assert_1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
 // Each segment
 for (let i = 0; i < itemPath.length; i++) {
 let segment = itemPath[i];
 // Must not be empty
-assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
+(0, assert_1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`);
 // Normalize slashes
 segment = pathHelper.normalizeSeparators(itemPath[i]);
 // Root segment
 if (i === 0 && pathHelper.hasRoot(segment)) {
 segment = pathHelper.safeTrimTrailingSeparator(segment);
-assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+(0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
 this.segments.push(segment);
 }
 // All other segments
 else {
 // Must not contain slash
-assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+(0, assert_1.default)(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
 this.segments.push(segment);
 }
 }
@@ -9704,7 +9698,11 @@ exports.Path = Path; and @@ -9717,7 +9715,7 @@: the same __createBinding/__importStar helper updates as in the -8948/-8961 hunks above.
@@ -9805,7 +9803,11 @@ exports.partialMatch = partialMatch; and @@ -9818,7 +9820,7 @@: the same __createBinding/__importStar helper updates as in the -8948/-8961 hunks above.
@@ -9850,9 +9852,9 @@ class Pattern {
 else {
 // Convert to pattern
 segments = segments || [];
-assert_1.default(segments.length, `Parameter 'segments' must not empty`);
+(0, assert_1.default)(segments.length, `Parameter 'segments' must not empty`);
 const root = Pattern.getLiteral(segments[0]);
-assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
+(0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
 pattern = new internal_path_1.Path(segments).toString().trim();
 if (patternOrNegate) {
 pattern = `!${pattern}`;
@@ -9946,13 +9948,13 @@ class Pattern {
 */
 static fixupPattern(pattern, homedir) {
 // Empty
-assert_1.default(pattern, 'pattern cannot be empty');
+(0, assert_1.default)(pattern, 'pattern cannot be empty');
 // Must not contain `.` segment, unless first segment
 // Must not contain `..` segment
 const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
-assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
+(0, assert_1.default)(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
 // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
-assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
+(0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
 // Normalize slashes
 pattern = pathHelper.normalizeSeparators(pattern);
 // Replace leading `.` segment
@@ -9962,8 +9964,8 @@ class Pattern {
 // Replace leading `~` segment
 else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
 homedir = homedir || os.homedir();
-assert_1.default(homedir, 'Unable to determine HOME directory');
-assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
+(0, assert_1.default)(homedir, 'Unable to determine HOME directory');
+(0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
 pattern = Pattern.globEscape(homedir) + pattern.substr(1);
 }
 // Replace relative drive root, e.g. pattern is C: or C:foo
@@ -100988,6 +100990,132 @@ function onConnectTimeout (socket) {
 module.exports = buildConnector
 
 
+/***/ }),
+
+/***/ 14462:
+/***/ ((module) => {
+
+"use strict";
+
+/** @type {Record<string, string | undefined>} */
+const headerNameLowerCasedRecord = {}
+
+// https://developer.mozilla.org/docs/Web/HTTP/Headers
+const wellknownHeaderNames = [
+'Accept', 'Accept-Encoding', 'Accept-Language', 'Accept-Ranges',
+'Access-Control-Allow-Credentials', 'Access-Control-Allow-Headers', 'Access-Control-Allow-Methods',
+'Access-Control-Allow-Origin', 'Access-Control-Expose-Headers', 'Access-Control-Max-Age',
+'Access-Control-Request-Headers', 'Access-Control-Request-Method', 'Age', 'Allow', 'Alt-Svc', 'Alt-Used',
+'Authorization', 'Cache-Control', 'Clear-Site-Data', 'Connection', 'Content-Disposition', 'Content-Encoding',
+'Content-Language', 'Content-Length', 'Content-Location', 'Content-Range', 'Content-Security-Policy',
+'Content-Security-Policy-Report-Only', 'Content-Type', 'Cookie', 'Cross-Origin-Embedder-Policy',
+'Cross-Origin-Opener-Policy', 'Cross-Origin-Resource-Policy', 'Date', 'Device-Memory', 'Downlink', 'ECT',
+'ETag', 'Expect', 'Expect-CT', 'Expires', 'Forwarded', 'From', 'Host', 'If-Match', 'If-Modified-Since',
+'If-None-Match', 'If-Range', 'If-Unmodified-Since', 'Keep-Alive', 'Last-Modified', 'Link', 'Location',
+'Max-Forwards', 'Origin', 'Permissions-Policy', 'Pragma', 'Proxy-Authenticate', 'Proxy-Authorization',
+'RTT', 'Range', 'Referer', 'Referrer-Policy', 'Refresh', 'Retry-After', 'Sec-WebSocket-Accept',
+'Sec-WebSocket-Extensions', 'Sec-WebSocket-Key', 'Sec-WebSocket-Protocol', 'Sec-WebSocket-Version',
+'Server', 'Server-Timing', 'Service-Worker-Allowed', 'Service-Worker-Navigation-Preload', 'Set-Cookie',
+'SourceMap', 'Strict-Transport-Security', 'Supports-Loading-Mode', 'TE', 'Timing-Allow-Origin', 'Trailer',
+'Transfer-Encoding', 'Upgrade', 'Upgrade-Insecure-Requests', 'User-Agent', 'Vary', 'Via', 'WWW-Authenticate',
+'X-Content-Type-Options', 'X-DNS-Prefetch-Control', 'X-Frame-Options', 'X-Permitted-Cross-Domain-Policies',
+'X-Powered-By', 'X-Requested-With', 'X-XSS-Protection'
+]
+
+for (let i = 0; i < wellknownHeaderNames.length; ++i) {
+const key = wellknownHeaderNames[i]
+const lowerCasedKey = key.toLowerCase()
+headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
+lowerCasedKey
+}
+
+// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
+Object.setPrototypeOf(headerNameLowerCasedRecord, null)
+
+module.exports = {
+wellknownHeaderNames,
+headerNameLowerCasedRecord
+}
+
+
 /***/ }),
 
 /***/ 48045:
@@ -101820,6 +101948,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045)
 const { Blob } = __nccwpck_require__(14300)
 const nodeUtil = __nccwpck_require__(73837)
 const { stringify } = __nccwpck_require__(63477)
+const { headerNameLowerCasedRecord } = __nccwpck_require__(14462)
 
 const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
 
@@ -102029,6 +102158,15 @@ function parseKeepAliveTimeout (val) {
 return m ? parseInt(m[1], 10) * 1000 : null
 }
 
+/**
+ * Retrieves a header name and returns its lowercase value.
+ * @param {string | Buffer} value Header name
+ * @returns {string}
+ */
+function headerNameToString (value) {
+return headerNameLowerCasedRecord[value] || value.toLowerCase()
+}
+
 function parseHeaders (headers, obj = {}) {
 // For H2 support
 if (!Array.isArray(headers)) return headers
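The two undici additions above work together: well-known header names are mapped to their lowercase forms once, the record's prototype is removed so lookups cannot hit `Object.prototype` members, and `headerNameToString` falls back to `toLowerCase()` for anything else. A small self-contained sketch of the same pattern; the names and helper below are illustrative, not undici internals:

```typescript
// Precompute lowercase forms for a fixed set of names, then look them up by
// exact match; unknown names fall back to toLowerCase().
const knownNames = ['Content-Type', 'Authorization', 'User-Agent'] as const

const lowerCased: Record<string, string | undefined> = Object.create(null)
for (const name of knownNames) {
  lowerCased[name] = lowerCased[name.toLowerCase()] = name.toLowerCase()
}

function toLowerHeaderName(value: string): string {
  return lowerCased[value] ?? value.toLowerCase()
}

// toLowerHeaderName('Content-Type') === 'content-type'  (record hit)
// toLowerHeaderName('X-Custom')     === 'x-custom'      (fallback)
// Because the record has a null prototype, a lookup like lowerCased['hasOwnProperty']
// is simply a miss rather than an inherited function.
```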
@@ -102300,6 +102438,7 @@ module.exports = {
 isIterable,
 isAsyncIterable,
 isDestroyed,
+headerNameToString,
 parseRawHeaders,
 parseHeaders,
 parseKeepAliveTimeout,
@@ -106436,6 +106575,9 @@ function httpRedirectFetch (fetchParams, response) {
 // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
 request.headersList.delete('authorization')
+
+// https://fetch.spec.whatwg.org/#authentication-entries
+request.headersList.delete('proxy-authorization', true)
 
 // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
 request.headersList.delete('cookie')
 request.headersList.delete('host')
@@ -108944,14 +109086,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398
 const assert = __nccwpck_require__(39491)
 const { isUint8Array } = __nccwpck_require__(29830)
 
+let supportedHashes = []
+
 // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
 /** @type {import('crypto')|undefined} */
 let crypto
 
 try {
 crypto = __nccwpck_require__(6113)
+const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
+supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
+/* c8 ignore next 3 */
 } catch {
 
 }
 
 function responseURL (response) {
@@ -109479,66 +109625,56 @@ function bytesMatch (bytes, metadataList) {
 return true
 }
 
-// 3. If parsedMetadata is the empty set, return true.
+// 3. If response is not eligible for integrity validation, return false.
+// TODO
+
+// 4. If parsedMetadata is the empty set, return true.
 if (parsedMetadata.length === 0) {
 return true
 }
 
-// 4. Let metadata be the result of getting the strongest
+// 5. Let metadata be the result of getting the strongest
 // metadata from parsedMetadata.
-const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
-// get the strongest algorithm
-const strongest = list[0].algo
-// get all entries that use the strongest algorithm; ignore weaker
-const metadata = list.filter((item) => item.algo === strongest)
+const strongest = getStrongestMetadata(parsedMetadata)
+const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
 
-// 5. For each item in metadata:
+// 6. For each item in metadata:
 for (const item of metadata) {
 // 1. Let algorithm be the alg component of item.
 const algorithm = item.algo
 
 // 2. Let expectedValue be the val component of item.
-let expectedValue = item.hash
+const expectedValue = item.hash
 
 // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
 // "be liberal with padding". This is annoying, and it's not even in the spec.
-
-if (expectedValue.endsWith('==')) {
-expectedValue = expectedValue.slice(0, -2)
-}
 
 // 3. Let actualValue be the result of applying algorithm to bytes.
 let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
 
-if (actualValue.endsWith('==')) {
+if (actualValue[actualValue.length - 1] === '=') {
+if (actualValue[actualValue.length - 2] === '=') {
 actualValue = actualValue.slice(0, -2)
+} else {
+actualValue = actualValue.slice(0, -1)
+}
 }
 
 // 4. If actualValue is a case-sensitive match for expectedValue,
 // return true.
-if (actualValue === expectedValue) {
-return true
-}
-
-let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
-
-if (actualBase64URL.endsWith('==')) {
-actualBase64URL = actualBase64URL.slice(0, -2)
-}
-
-if (actualBase64URL === expectedValue) {
+if (compareBase64Mixed(actualValue, expectedValue)) {
 return true
 }
 }
 
-// 6. Return false.
+// 7. Return false.
 return false
 }
 
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
 
 /**
 * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
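The reworked `bytesMatch` picks the strongest hash algorithm in the integrity metadata and then compares the computed digest against the expected value in a padding-tolerant way that also accepts base64url. A hedged illustration of those two ideas in TypeScript; this is not undici's actual `getStrongestMetadata` or `compareBase64Mixed` (the latter's body is truncated above), just the same technique under stated assumptions:

```typescript
import {createHash} from 'node:crypto'

type SriAlgo = 'sha256' | 'sha384' | 'sha512'

// Illustrative strongest-algorithm pick: sha512 > sha384 > sha256,
// matching the ordering used by getStrongestMetadata above.
function strongestAlgo(algos: SriAlgo[]): SriAlgo {
  if (algos.includes('sha512')) return 'sha512'
  if (algos.includes('sha384')) return 'sha384'
  return 'sha256'
}

// Illustrative digest check that tolerates base64url and missing '=' padding
// in the expected value, which is the problem compareBase64Mixed addresses.
function digestMatches(bytes: Buffer, algo: SriAlgo, expected: string): boolean {
  const normalize = (s: string) =>
    s.replace(/=+$/, '').replace(/\+/g, '-').replace(/\//g, '_')
  const actual = createHash(algo).update(bytes).digest('base64')
  return normalize(actual) === normalize(expected)
}

// strongestAlgo(['sha256', 'sha512']) === 'sha512'
// digestMatches(Buffer.from('hello'), 'sha256', '<base64 or base64url digest>')
// treats both encodings of the same digest as equal.
```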
@@ -109552,8 +109688,6 @@ function parseMetadata (metadata) {
 // 2. Let empty be equal to true.
 let empty = true
 
-const supportedHashes = crypto.getHashes()
-
 // 3. For each token returned by splitting metadata on spaces:
 for (const token of metadata.split(' ')) {
 // 1. Set empty to false.
@@ -109563,7 +109697,11 @@ function parseMetadata (metadata) {
 const parsedToken = parseHashWithOptions.exec(token)
 
 // 3. If token does not parse, continue to the next token.
-if (parsedToken === null || parsedToken.groups === undefined) {
+if (
+parsedToken === null ||
+parsedToken.groups === undefined ||
+parsedToken.groups.algo === undefined
+) {
 // Note: Chromium blocks the request at this point, but Firefox
 // gives a warning that an invalid integrity was given. The
 // correct behavior is to ignore these, and subsequently not
@ -109572,11 +109710,11 @@ function parseMetadata (metadata) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// 4. Let algorithm be the hash-algo component of token.
|
// 4. Let algorithm be the hash-algo component of token.
|
||||||
const algorithm = parsedToken.groups.algo
|
const algorithm = parsedToken.groups.algo.toLowerCase()
|
||||||
|
|
||||||
// 5. If algorithm is a hash function recognized by the user
|
// 5. If algorithm is a hash function recognized by the user
|
||||||
// agent, add the parsed token to result.
|
// agent, add the parsed token to result.
|
||||||
if (supportedHashes.includes(algorithm.toLowerCase())) {
|
if (supportedHashes.includes(algorithm)) {
|
||||||
result.push(parsedToken.groups)
|
result.push(parsedToken.groups)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -109589,6 +109727,82 @@ function parseMetadata (metadata) {
|
|||||||
return result
|
return result
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
|
||||||
|
*/
|
||||||
|
function getStrongestMetadata (metadataList) {
|
||||||
|
// Let algorithm be the algo component of the first item in metadataList.
|
||||||
|
// Can be sha256
|
||||||
|
let algorithm = metadataList[0].algo
|
||||||
|
// If the algorithm is sha512, then it is the strongest
|
||||||
|
// and we can return immediately
|
||||||
|
if (algorithm[3] === '5') {
|
||||||
|
return algorithm
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 1; i < metadataList.length; ++i) {
|
||||||
|
const metadata = metadataList[i]
|
||||||
|
// If the algorithm is sha512, then it is the strongest
|
||||||
|
// and we can break the loop immediately
|
||||||
|
if (metadata.algo[3] === '5') {
|
||||||
|
algorithm = 'sha512'
|
||||||
|
break
|
||||||
|
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
|
||||||
|
} else if (algorithm[3] === '3') {
|
||||||
|
continue
|
||||||
|
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
|
||||||
|
// the strongest
|
||||||
|
} else if (metadata.algo[3] === '3') {
|
||||||
|
algorithm = 'sha384'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return algorithm
|
||||||
|
}
|
||||||
|
|
||||||
|
function filterMetadataListByAlgorithm (metadataList, algorithm) {
|
||||||
|
if (metadataList.length === 1) {
|
||||||
|
return metadataList
|
||||||
|
}
|
||||||
|
|
||||||
|
let pos = 0
|
||||||
|
for (let i = 0; i < metadataList.length; ++i) {
|
||||||
|
if (metadataList[i].algo === algorithm) {
|
||||||
|
metadataList[pos++] = metadataList[i]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
metadataList.length = pos
|
||||||
|
|
||||||
|
return metadataList
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Compares two base64 strings, allowing for base64url
|
||||||
|
* in the second string.
|
||||||
|
*
|
||||||
|
* @param {string} actualValue always base64
|
||||||
|
* @param {string} expectedValue base64 or base64url
|
||||||
|
* @returns {boolean}
|
||||||
|
*/
|
||||||
|
function compareBase64Mixed (actualValue, expectedValue) {
|
||||||
|
if (actualValue.length !== expectedValue.length) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for (let i = 0; i < actualValue.length; ++i) {
|
||||||
|
if (actualValue[i] !== expectedValue[i]) {
|
||||||
|
if (
|
||||||
|
(actualValue[i] === '+' && expectedValue[i] === '-') ||
|
||||||
|
(actualValue[i] === '/' && expectedValue[i] === '_')
|
||||||
|
) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
|
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
|
||||||
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
|
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
|
||||||
// TODO
|
// TODO
|
||||||
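Illustrative sketch (separate from the bundle) of what a mixed base64/base64url comparison has to tolerate: the two encodings of the same digest differ only in the `+/` vs `-_` alphabet and the trailing `=` padding. The input bytes are arbitrary; base64url digests require a reasonably recent Node.js.

```js
const crypto = require('crypto')

// Compute the same sha512 digest in both encodings.
const bytes = Buffer.from('hello world')
const b64 = crypto.createHash('sha512').update(bytes).digest('base64')
const b64url = crypto.createHash('sha512').update(bytes).digest('base64url')

console.log(b64.length, b64url.length) // base64url drops the '=' padding
// Translating the alphabet and stripping padding makes them equal.
console.log(b64.replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, '') === b64url) // true
```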
@@ -110004,7 +110218,8 @@ module.exports = {
 urlHasHttpsScheme,
 urlIsHttpHttpsScheme,
 readAllBytes,
-normalizeMethodRecord
+normalizeMethodRecord,
+parseMetadata
 }
 
 
@@ -112091,12 +112306,17 @@ function parseLocation (statusCode, headers) {
 
 // https://tools.ietf.org/html/rfc7231#section-6.4.4
 function shouldRemoveHeader (header, removeContent, unknownOrigin) {
-return (
-(header.length === 4 && header.toString().toLowerCase() === 'host') ||
-(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
-(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
-(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
-)
+if (header.length === 4) {
+return util.headerNameToString(header) === 'host'
+}
+if (removeContent && util.headerNameToString(header).startsWith('content-')) {
+return true
+}
+if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
+const name = util.headerNameToString(header)
+return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
+}
+return false
 }
 
 // https://tools.ietf.org/html/rfc7231#section-6.4
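A rough standalone sketch of the behaviour the rewritten `shouldRemoveHeader` encodes (the simplified lower-casing helper below stands in for `util.headerNameToString`): on a redirect to an unknown origin the `authorization`, `cookie` and `proxy-authorization` headers are dropped, and `content-*` headers are dropped when the body is discarded.

```js
// Simplified stand-in for util.headerNameToString.
function headerNameToString (value) {
  return value.toString().toLowerCase()
}

function shouldRemoveHeader (header, removeContent, unknownOrigin) {
  if (header.length === 4) {
    return headerNameToString(header) === 'host'
  }
  if (removeContent && headerNameToString(header).startsWith('content-')) {
    return true
  }
  if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
    const name = headerNameToString(header)
    return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
  }
  return false
}

console.log(shouldRemoveHeader('Authorization', false, true)) // true
console.log(shouldRemoveHeader('Content-Type', true, false))  // true
console.log(shouldRemoveHeader('Accept', false, true))        // false
```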
@@ -125552,6 +125772,7 @@ var Inputs;
 Inputs["RetentionDays"] = "retention-days";
 Inputs["CompressionLevel"] = "compression-level";
 Inputs["DeleteMerged"] = "delete-merged";
+Inputs["IncludeHiddenFiles"] = "include-hidden-files";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 
 
@@ -125635,13 +125856,15 @@ function getInputs() {
 const pattern = core.getInput(constants_1.Inputs.Pattern, { required: true });
 const separateDirectories = core.getBooleanInput(constants_1.Inputs.SeparateDirectories);
 const deleteMerged = core.getBooleanInput(constants_1.Inputs.DeleteMerged);
+const includeHiddenFiles = core.getBooleanInput(constants_1.Inputs.IncludeHiddenFiles);
 const inputs = {
 name,
 pattern,
 separateDirectories,
 deleteMerged,
 retentionDays: 0,
-compressionLevel: 6
+compressionLevel: 6,
+includeHiddenFiles
 };
 const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
 if (retentionDaysStr) {
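A rough, hypothetical sketch of how the new `include-hidden-files` input flows from `getBooleanInput` into the search step; `run`, `findFilesToUpload` and `tmpDir` here are stand-ins for illustration, not the action's actual entry point.

```js
const core = require('@actions/core')

// Sketch: read the new boolean flag and forward it to the search step.
async function run (findFilesToUpload, tmpDir) {
  const includeHiddenFiles = core.getBooleanInput('include-hidden-files')
  const searchResult = await findFilesToUpload(tmpDir, includeHiddenFiles)
  core.info(`${searchResult.filesToUpload.length} file(s) found with include-hidden-files=${includeHiddenFiles}`)
}

module.exports = { run }
```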
@@ -125757,7 +125980,7 @@ function run() {
 if (typeof inputs.compressionLevel !== 'undefined') {
 options.compressionLevel = inputs.compressionLevel;
 }
-const searchResult = yield (0, search_1.findFilesToUpload)(tmpDir);
+const searchResult = yield (0, search_1.findFilesToUpload)(tmpDir, inputs.includeHiddenFiles);
 yield (0, upload_artifact_1.uploadArtifact)(inputs.name, searchResult.filesToUpload, searchResult.rootDirectory, options);
 core.info(`The ${artifacts.length} artifact(s) have been successfully merged!`);
 if (inputs.deleteMerged) {
@@ -125824,11 +126047,12 @@ const fs_1 = __nccwpck_require__(57147);
 const path_1 = __nccwpck_require__(71017);
 const util_1 = __nccwpck_require__(73837);
 const stats = (0, util_1.promisify)(fs_1.stat);
-function getDefaultGlobOptions() {
+function getDefaultGlobOptions(includeHiddenFiles) {
 return {
 followSymbolicLinks: true,
 implicitDescendants: true,
-omitBrokenSymbolicLinks: true
+omitBrokenSymbolicLinks: true,
+excludeHiddenFiles: !includeHiddenFiles
 };
 }
 /**
@@ -125882,10 +126106,10 @@ function getMultiPathLCA(searchPaths) {
 }
 return path.join(...commonPaths);
 }
-function findFilesToUpload(searchPath, globOptions) {
+function findFilesToUpload(searchPath, includeHiddenFiles) {
 return __awaiter(this, void 0, void 0, function* () {
 const searchResults = [];
-const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions());
+const globber = yield glob.create(searchPath, getDefaultGlobOptions(includeHiddenFiles || false));
 const rawSearchResults = yield globber.glob();
 /*
 Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten
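A minimal sketch, using `@actions/glob` directly, of the option set the updated `getDefaultGlobOptions` builds: hidden files are excluded unless the caller opts in. The pattern below is an example path, not taken from the action.

```js
const glob = require('@actions/glob')

// Sketch: hidden files/directories are skipped unless includeHiddenFiles is set.
async function listFiles (pattern, includeHiddenFiles = false) {
  const globber = await glob.create(pattern, {
    followSymbolicLinks: true,
    implicitDescendants: true,
    omitBrokenSymbolicLinks: true,
    excludeHiddenFiles: !includeHiddenFiles
  })
  return globber.glob()
}

listFiles('artifacts/**').then(files => console.log(`${files.length} file(s) matched`))
```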
@@ -135977,7 +136201,7 @@ module.exports = index;
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.6","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
 

512 dist/upload/index.js vendored

@@ -2328,9 +2328,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
 const promises_1 = __importDefault(__nccwpck_require__(73292));
-const stream = __importStar(__nccwpck_require__(12781));
-const fs_1 = __nccwpck_require__(57147);
-const path = __importStar(__nccwpck_require__(71017));
 const github = __importStar(__nccwpck_require__(21260));
 const core = __importStar(__nccwpck_require__(42186));
 const httpClient = __importStar(__nccwpck_require__(96255));
@@ -2371,9 +2368,6 @@ function streamExtract(url, directory) {
 return;
 }
 catch (error) {
-if (error.message.includes('Malformed extraction path')) {
-throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
-}
 retryCount++;
 core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
 // wait 5 seconds before retrying
@@ -2396,8 +2390,6 @@ function streamExtractExternal(url, directory) {
 response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
 };
 const timer = setTimeout(timerFn, timeout);
-const createdDirectories = new Set();
-createdDirectories.add(directory);
 response.message
 .on('data', () => {
 timer.refresh();
@@ -2407,47 +2399,11 @@ function streamExtractExternal(url, directory) {
 clearTimeout(timer);
 reject(error);
 })
-.pipe(unzip_stream_1.default.Parse())
-.pipe(new stream.Transform({
-objectMode: true,
-transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
-const fullPath = path.normalize(path.join(directory, entry.path));
-if (!directory.endsWith(path.sep)) {
-directory += path.sep;
-}
-if (!fullPath.startsWith(directory)) {
-reject(new Error(`Malformed extraction path: ${fullPath}`));
-}
-if (entry.type === 'Directory') {
-if (!createdDirectories.has(fullPath)) {
-createdDirectories.add(fullPath);
-yield resolveOrCreateDirectory(fullPath).then(() => {
-entry.autodrain();
-callback();
-});
-}
-else {
-entry.autodrain();
-callback();
-}
-}
-else {
-core.info(`Extracting artifact entry: ${fullPath}`);
-if (!createdDirectories.has(path.dirname(fullPath))) {
-createdDirectories.add(path.dirname(fullPath));
-yield resolveOrCreateDirectory(path.dirname(fullPath));
-}
-const writeStream = (0, fs_1.createWriteStream)(fullPath);
-writeStream.on('finish', callback);
-writeStream.on('error', reject);
-entry.pipe(writeStream);
-}
-})
-}))
-.on('finish', () => __awaiter(this, void 0, void 0, function* () {
+.pipe(unzip_stream_1.default.Extract({ path: directory }))
+.on('close', () => {
 clearTimeout(timer);
 resolve();
-}))
+})
 .on('error', (error) => {
 reject(error);
 });
@@ -3069,8 +3025,9 @@ function isGhes() {
 const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
 const hostname = ghUrl.hostname.trimEnd().toUpperCase();
 const isGitHubHost = hostname === 'GITHUB.COM';
-const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
-return !isGitHubHost && !isGheHost;
+const isGheHost = hostname.endsWith('.GHE.COM');
+const isLocalHost = hostname.endsWith('.LOCALHOST');
+return !isGitHubHost && !isGheHost && !isLocalHost;
 }
 exports.isGhes = isGhes;
 function getGitHubWorkspaceDir() {
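A standalone sketch of the host classification the updated `isGhes` performs; the hostnames passed in are examples.

```js
// Sketch: classify a server URL the way the updated check does.
function isGhes (serverUrl = process.env.GITHUB_SERVER_URL || 'https://github.com') {
  const hostname = new URL(serverUrl).hostname.trimEnd().toUpperCase()
  const isGitHubHost = hostname === 'GITHUB.COM'
  const isGheHost = hostname.endsWith('.GHE.COM')
  const isLocalHost = hostname.endsWith('.LOCALHOST')
  return !isGitHubHost && !isGheHost && !isLocalHost
}

console.log(isGhes('https://github.com'))         // false
console.log(isGhes('https://tenant.ghe.com'))     // false
console.log(isGhes('https://github.example.com')) // true
```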
@@ -8924,16 +8881,18 @@ exports.create = create;
 * Computes the sha256 hash of a glob
 *
 * @param patterns Patterns separated by newlines
+* @param currentWorkspace Workspace used when matching files
 * @param options Glob options
+* @param verbose Enables verbose logging
 */
-function hashFiles(patterns, options, verbose = false) {
+function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
 return __awaiter(this, void 0, void 0, function* () {
 let followSymbolicLinks = true;
 if (options && typeof options.followSymbolicLinks === 'boolean') {
 followSymbolicLinks = options.followSymbolicLinks;
 }
 const globber = yield create(patterns, { followSymbolicLinks });
-return internal_hash_files_1.hashFiles(globber, verbose);
+return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose);
 });
 }
 exports.hashFiles = hashFiles;
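A brief sketch of the new optional workspace argument on `hashFiles` (assuming an `@actions/glob` version that carries this signature); the pattern and directory are placeholders.

```js
const { hashFiles } = require('@actions/glob')

// Sketch: hash matching files under an explicit workspace instead of GITHUB_WORKSPACE.
async function demo () {
  const digest = await hashFiles('**/package-lock.json', '/tmp/checkout')
  // An empty string typically means nothing matched under the workspace.
  console.log(digest === '' ? 'no matches' : digest)
}

demo()
```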
@@ -8948,7 +8907,11 @@ exports.hashFiles = hashFiles;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -8961,7 +8924,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -8976,7 +8939,8 @@ function getOptions(copy) {
 followSymbolicLinks: true,
 implicitDescendants: true,
 matchDirectories: true,
-omitBrokenSymbolicLinks: true
+omitBrokenSymbolicLinks: true,
+excludeHiddenFiles: false
 };
 if (copy) {
 if (typeof copy.followSymbolicLinks === 'boolean') {
@@ -8995,6 +8959,10 @@ function getOptions(copy) {
 result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
 core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
 }
+if (typeof copy.excludeHiddenFiles === 'boolean') {
+result.excludeHiddenFiles = copy.excludeHiddenFiles;
+core.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`);
+}
 }
 return result;
 }
@@ -9010,7 +8978,11 @@ exports.getOptions = getOptions;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -9023,7 +8995,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -9077,19 +9049,21 @@ class DefaultGlobber {
 return this.searchPaths.slice();
 }
 glob() {
-var e_1, _a;
+var _a, e_1, _b, _c;
 return __awaiter(this, void 0, void 0, function* () {
 const result = [];
 try {
-for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
-const itemPath = _c.value;
+for (var _d = true, _e = __asyncValues(this.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
+_c = _f.value;
+_d = false;
+const itemPath = _c;
 result.push(itemPath);
 }
 }
 catch (e_1_1) { e_1 = { error: e_1_1 }; }
 finally {
 try {
-if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
 }
 finally { if (e_1) throw e_1.error; }
 }
@@ -9147,6 +9121,10 @@ class DefaultGlobber {
 if (!stats) {
 continue;
 }
+// Hidden file or directory?
+if (options.excludeHiddenFiles && path.basename(item.path).match(/^\./)) {
+continue;
+}
 // Directory
 if (stats.isDirectory()) {
 // Matched
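A tiny sketch of the basename test the globber applies when `excludeHiddenFiles` is set; the sample paths are illustrative.

```js
const path = require('path')

// Sketch: an item counts as hidden when its basename starts with a dot.
function isHidden (itemPath) {
  return /^\./.test(path.basename(itemPath))
}

console.log(isHidden('/work/project/.env'))      // true
console.log(isHidden('/work/project/.github'))   // true
console.log(isHidden('/work/project/readme.md')) // false
```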
@@ -9252,7 +9230,11 @@ exports.DefaultGlobber = DefaultGlobber;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -9265,7 +9247,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -9293,18 +9275,22 @@ const fs = __importStar(__nccwpck_require__(57147));
 const stream = __importStar(__nccwpck_require__(12781));
 const util = __importStar(__nccwpck_require__(73837));
 const path = __importStar(__nccwpck_require__(71017));
-function hashFiles(globber, verbose = false) {
-var e_1, _a;
-var _b;
+function hashFiles(globber, currentWorkspace, verbose = false) {
+var _a, e_1, _b, _c;
+var _d;
 return __awaiter(this, void 0, void 0, function* () {
 const writeDelegate = verbose ? core.info : core.debug;
 let hasMatch = false;
-const githubWorkspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+const githubWorkspace = currentWorkspace
+? currentWorkspace
+: (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
 const result = crypto.createHash('sha256');
 let count = 0;
 try {
-for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-const file = _d.value;
+for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
+_c = _g.value;
+_e = false;
+const file = _c;
 writeDelegate(file);
 if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
 writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
@@ -9327,7 +9313,7 @@ function hashFiles(globber, verbose = false) {
 catch (e_1_1) { e_1 = { error: e_1_1 }; }
 finally {
 try {
-if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
 }
 finally { if (e_1) throw e_1.error; }
 }
@@ -9367,7 +9353,7 @@ var MatchKind;
 MatchKind[MatchKind["File"] = 2] = "File";
 /** Matched */
 MatchKind[MatchKind["All"] = 3] = "All";
-})(MatchKind = exports.MatchKind || (exports.MatchKind = {}));
+})(MatchKind || (exports.MatchKind = MatchKind = {}));
 //# sourceMappingURL=internal-match-kind.js.map
 
 /***/ }),
@@ -9379,7 +9365,11 @@ var MatchKind;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -9392,7 +9382,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -9442,8 +9432,8 @@ exports.dirname = dirname;
 * or `C:` are expanded based on the current working directory.
 */
 function ensureAbsoluteRoot(root, itemPath) {
-assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
-assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
+(0, assert_1.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Already rooted
 if (hasAbsoluteRoot(itemPath)) {
 return itemPath;
@@ -9453,7 +9443,7 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like C: or C:foo
 if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
 let cwd = process.cwd();
-assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+(0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 // Drive letter matches cwd? Expand to cwd
 if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
 // Drive only, e.g. C:
@@ -9478,11 +9468,11 @@ function ensureAbsoluteRoot(root, itemPath) {
 // Check for itemPath like \ or \foo
 else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
 const cwd = process.cwd();
-assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
+(0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
 return `${cwd[0]}:\\${itemPath.substr(1)}`;
 }
 }
-assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
+(0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
 // Otherwise ensure root ends with a separator
 if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
 // Intentionally empty
@@ -9499,7 +9489,7 @@ exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
 * `\\hello\share` and `C:\hello` (and using alternate separator).
 */
 function hasAbsoluteRoot(itemPath) {
-assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -9516,7 +9506,7 @@ exports.hasAbsoluteRoot = hasAbsoluteRoot;
 * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
 */
 function hasRoot(itemPath) {
-assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`);
 // Normalize separators
 itemPath = normalizeSeparators(itemPath);
 // Windows
@@ -9584,7 +9574,11 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -9597,7 +9591,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -9622,7 +9616,7 @@ class Path {
 this.segments = [];
 // String
 if (typeof itemPath === 'string') {
-assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
+(0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`);
 // Normalize slashes and trim unnecessary trailing slash
 itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
 // Not rooted
@@ -9649,24 +9643,24 @@ class Path {
 // Array
 else {
 // Must not be empty
-assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
+(0, assert_1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
 // Each segment
 for (let i = 0; i < itemPath.length; i++) {
 let segment = itemPath[i];
 // Must not be empty
-assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
+(0, assert_1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`);
 // Normalize slashes
 segment = pathHelper.normalizeSeparators(itemPath[i]);
 // Root segment
 if (i === 0 && pathHelper.hasRoot(segment)) {
 segment = pathHelper.safeTrimTrailingSeparator(segment);
-assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
+(0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
 this.segments.push(segment);
 }
 // All other segments
 else {
 // Must not contain slash
-assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
+(0, assert_1.default)(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
 this.segments.push(segment);
 }
 }
@@ -9704,7 +9698,11 @@ exports.Path = Path;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -9717,7 +9715,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -9805,7 +9803,11 @@ exports.partialMatch = partialMatch;
 
 var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
-Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+var desc = Object.getOwnPropertyDescriptor(m, k);
+if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+desc = { enumerable: true, get: function() { return m[k]; } };
+}
+Object.defineProperty(o, k2, desc);
 }) : (function(o, m, k, k2) {
 if (k2 === undefined) k2 = k;
 o[k2] = m[k];
@@ -9818,7 +9820,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 var __importStar = (this && this.__importStar) || function (mod) {
 if (mod && mod.__esModule) return mod;
 var result = {};
-if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
 __setModuleDefault(result, mod);
 return result;
 };
@@ -9850,9 +9852,9 @@ class Pattern {
 else {
 // Convert to pattern
 segments = segments || [];
-assert_1.default(segments.length, `Parameter 'segments' must not empty`);
+(0, assert_1.default)(segments.length, `Parameter 'segments' must not empty`);
 const root = Pattern.getLiteral(segments[0]);
-assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
+(0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
 pattern = new internal_path_1.Path(segments).toString().trim();
 if (patternOrNegate) {
 pattern = `!${pattern}`;
@@ -9946,13 +9948,13 @@ class Pattern {
 */
 static fixupPattern(pattern, homedir) {
 // Empty
-assert_1.default(pattern, 'pattern cannot be empty');
+(0, assert_1.default)(pattern, 'pattern cannot be empty');
 // Must not contain `.` segment, unless first segment
 // Must not contain `..` segment
 const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
-assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
+(0, assert_1.default)(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
 // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
-assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
+(0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
 // Normalize slashes
 pattern = pathHelper.normalizeSeparators(pattern);
 // Replace leading `.` segment
@@ -9962,8 +9964,8 @@ class Pattern {
 // Replace leading `~` segment
 else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
 homedir = homedir || os.homedir();
-assert_1.default(homedir, 'Unable to determine HOME directory');
-assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
+(0, assert_1.default)(homedir, 'Unable to determine HOME directory');
+(0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
 pattern = Pattern.globEscape(homedir) + pattern.substr(1);
 }
 // Replace relative drive root, e.g. pattern is C: or C:foo
@@ -100988,6 +100990,132 @@ function onConnectTimeout (socket) {
 module.exports = buildConnector
 
 
+/***/ }),
+
+/***/ 14462:
+/***/ ((module) => {
+
+"use strict";
+
+
+/** @type {Record<string, string | undefined>} */
+const headerNameLowerCasedRecord = {}
+
+// https://developer.mozilla.org/docs/Web/HTTP/Headers
+const wellknownHeaderNames = [
+'Accept',
+'Accept-Encoding',
+'Accept-Language',
+'Accept-Ranges',
+'Access-Control-Allow-Credentials',
+'Access-Control-Allow-Headers',
+'Access-Control-Allow-Methods',
+'Access-Control-Allow-Origin',
+'Access-Control-Expose-Headers',
+'Access-Control-Max-Age',
+'Access-Control-Request-Headers',
+'Access-Control-Request-Method',
+'Age',
+'Allow',
+'Alt-Svc',
+'Alt-Used',
+'Authorization',
+'Cache-Control',
+'Clear-Site-Data',
+'Connection',
+'Content-Disposition',
+'Content-Encoding',
+'Content-Language',
+'Content-Length',
+'Content-Location',
+'Content-Range',
+'Content-Security-Policy',
+'Content-Security-Policy-Report-Only',
+'Content-Type',
+'Cookie',
+'Cross-Origin-Embedder-Policy',
+'Cross-Origin-Opener-Policy',
+'Cross-Origin-Resource-Policy',
+'Date',
+'Device-Memory',
+'Downlink',
+'ECT',
+'ETag',
+'Expect',
+'Expect-CT',
+'Expires',
+'Forwarded',
+'From',
+'Host',
+'If-Match',
+'If-Modified-Since',
+'If-None-Match',
+'If-Range',
+'If-Unmodified-Since',
+'Keep-Alive',
+'Last-Modified',
+'Link',
+'Location',
+'Max-Forwards',
+'Origin',
+'Permissions-Policy',
+'Pragma',
+'Proxy-Authenticate',
+'Proxy-Authorization',
+'RTT',
+'Range',
+'Referer',
+'Referrer-Policy',
+'Refresh',
+'Retry-After',
+'Sec-WebSocket-Accept',
+'Sec-WebSocket-Extensions',
+'Sec-WebSocket-Key',
+'Sec-WebSocket-Protocol',
+'Sec-WebSocket-Version',
+'Server',
+'Server-Timing',
+'Service-Worker-Allowed',
+'Service-Worker-Navigation-Preload',
+'Set-Cookie',
+'SourceMap',
+'Strict-Transport-Security',
+'Supports-Loading-Mode',
+'TE',
+'Timing-Allow-Origin',
+'Trailer',
+'Transfer-Encoding',
+'Upgrade',
+'Upgrade-Insecure-Requests',
+'User-Agent',
+'Vary',
+'Via',
+'WWW-Authenticate',
+'X-Content-Type-Options',
+'X-DNS-Prefetch-Control',
+'X-Frame-Options',
+'X-Permitted-Cross-Domain-Policies',
+'X-Powered-By',
+'X-Requested-With',
+'X-XSS-Protection'
+]
+
+for (let i = 0; i < wellknownHeaderNames.length; ++i) {
+const key = wellknownHeaderNames[i]
+const lowerCasedKey = key.toLowerCase()
+headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
+lowerCasedKey
+}
+
+// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
+Object.setPrototypeOf(headerNameLowerCasedRecord, null)
+
+module.exports = {
+wellknownHeaderNames,
+headerNameLowerCasedRecord
+}
+
+
 /***/ }),
 
 /***/ 48045:
@@ -101820,6 +101948,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045)
 const { Blob } = __nccwpck_require__(14300)
 const nodeUtil = __nccwpck_require__(73837)
 const { stringify } = __nccwpck_require__(63477)
+const { headerNameLowerCasedRecord } = __nccwpck_require__(14462)
 
 const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
 
@@ -102029,6 +102158,15 @@ function parseKeepAliveTimeout (val) {
 return m ? parseInt(m[1], 10) * 1000 : null
 }
 
+/**
+ * Retrieves a header name and returns its lowercase value.
+ * @param {string | Buffer} value Header name
+ * @returns {string}
+ */
+function headerNameToString (value) {
+return headerNameLowerCasedRecord[value] || value.toLowerCase()
+}
+
 function parseHeaders (headers, obj = {}) {
 // For H2 support
 if (!Array.isArray(headers)) return headers
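A condensed sketch of the lookup-table shortcut the new `headerNameToString` relies on; only a few entries are shown, and only string inputs are exercised here.

```js
// Pre-lowercased well-known names avoid calling toLowerCase() per request.
const headerNameLowerCasedRecord = Object.create(null)
for (const name of ['Content-Type', 'Authorization', 'Accept-Encoding']) {
  headerNameLowerCasedRecord[name] = headerNameLowerCasedRecord[name.toLowerCase()] = name.toLowerCase()
}

function headerNameToString (value) {
  return headerNameLowerCasedRecord[value] || value.toLowerCase()
}

console.log(headerNameToString('Content-Type'))    // 'content-type' via the record
console.log(headerNameToString('X-Custom-Header')) // 'x-custom-header' via the fallback
```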
@@ -102300,6 +102438,7 @@ module.exports = {
   isIterable,
   isAsyncIterable,
   isDestroyed,
+  headerNameToString,
   parseRawHeaders,
   parseHeaders,
   parseKeepAliveTimeout,
@@ -106436,6 +106575,9 @@ function httpRedirectFetch (fetchParams, response) {
     // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
     request.headersList.delete('authorization')
 
+    // https://fetch.spec.whatwg.org/#authentication-entries
+    request.headersList.delete('proxy-authorization', true)
+
     // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
     request.headersList.delete('cookie')
     request.headersList.delete('host')
@@ -108944,14 +109086,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398
 const assert = __nccwpck_require__(39491)
 const { isUint8Array } = __nccwpck_require__(29830)
 
+let supportedHashes = []
+
 // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
 /** @type {import('crypto')|undefined} */
 let crypto
 
 try {
   crypto = __nccwpck_require__(6113)
+  const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
+  supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
+/* c8 ignore next 3 */
 } catch {
-
 }
 
 function responseURL (response) {
@@ -109479,66 +109625,56 @@ function bytesMatch (bytes, metadataList) {
     return true
   }
 
-  // 3. If parsedMetadata is the empty set, return true.
+  // 3. If response is not eligible for integrity validation, return false.
+  // TODO
+
+  // 4. If parsedMetadata is the empty set, return true.
   if (parsedMetadata.length === 0) {
     return true
   }
 
-  // 4. Let metadata be the result of getting the strongest
+  // 5. Let metadata be the result of getting the strongest
   // metadata from parsedMetadata.
-  const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
-  // get the strongest algorithm
-  const strongest = list[0].algo
-  // get all entries that use the strongest algorithm; ignore weaker
-  const metadata = list.filter((item) => item.algo === strongest)
+  const strongest = getStrongestMetadata(parsedMetadata)
+  const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
 
-  // 5. For each item in metadata:
+  // 6. For each item in metadata:
   for (const item of metadata) {
     // 1. Let algorithm be the alg component of item.
     const algorithm = item.algo
 
     // 2. Let expectedValue be the val component of item.
-    let expectedValue = item.hash
+    const expectedValue = item.hash
 
     // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
     // "be liberal with padding". This is annoying, and it's not even in the spec.
 
-    if (expectedValue.endsWith('==')) {
-      expectedValue = expectedValue.slice(0, -2)
-    }
-
     // 3. Let actualValue be the result of applying algorithm to bytes.
     let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
 
-    if (actualValue.endsWith('==')) {
+    if (actualValue[actualValue.length - 1] === '=') {
+      if (actualValue[actualValue.length - 2] === '=') {
         actualValue = actualValue.slice(0, -2)
+      } else {
+        actualValue = actualValue.slice(0, -1)
+      }
     }
 
     // 4. If actualValue is a case-sensitive match for expectedValue,
     // return true.
-    if (actualValue === expectedValue) {
-      return true
-    }
-
-    let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
-
-    if (actualBase64URL.endsWith('==')) {
-      actualBase64URL = actualBase64URL.slice(0, -2)
-    }
-
-    if (actualBase64URL === expectedValue) {
+    if (compareBase64Mixed(actualValue, expectedValue)) {
       return true
     }
   }
 
-  // 6. Return false.
+  // 7. Return false.
   return false
 }
 
 // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
 // https://www.w3.org/TR/CSP2/#source-list-syntax
 // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
-const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
+const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
 
 /**
  * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@@ -109552,8 +109688,6 @@ function parseMetadata (metadata) {
   // 2. Let empty be equal to true.
   let empty = true
 
-  const supportedHashes = crypto.getHashes()
-
   // 3. For each token returned by splitting metadata on spaces:
   for (const token of metadata.split(' ')) {
     // 1. Set empty to false.
@@ -109563,7 +109697,11 @@ function parseMetadata (metadata) {
     const parsedToken = parseHashWithOptions.exec(token)
 
     // 3. If token does not parse, continue to the next token.
-    if (parsedToken === null || parsedToken.groups === undefined) {
+    if (
+      parsedToken === null ||
+      parsedToken.groups === undefined ||
+      parsedToken.groups.algo === undefined
+    ) {
       // Note: Chromium blocks the request at this point, but Firefox
       // gives a warning that an invalid integrity was given. The
       // correct behavior is to ignore these, and subsequently not
@@ -109572,11 +109710,11 @@ function parseMetadata (metadata) {
     }
 
     // 4. Let algorithm be the hash-algo component of token.
-    const algorithm = parsedToken.groups.algo
+    const algorithm = parsedToken.groups.algo.toLowerCase()
 
     // 5. If algorithm is a hash function recognized by the user
     // agent, add the parsed token to result.
-    if (supportedHashes.includes(algorithm.toLowerCase())) {
+    if (supportedHashes.includes(algorithm)) {
       result.push(parsedToken.groups)
     }
   }
@@ -109589,6 +109727,82 @@ function parseMetadata (metadata) {
   return result
 }
 
+/**
+ * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
+ */
+function getStrongestMetadata (metadataList) {
+  // Let algorithm be the algo component of the first item in metadataList.
+  // Can be sha256
+  let algorithm = metadataList[0].algo
+  // If the algorithm is sha512, then it is the strongest
+  // and we can return immediately
+  if (algorithm[3] === '5') {
+    return algorithm
+  }
+
+  for (let i = 1; i < metadataList.length; ++i) {
+    const metadata = metadataList[i]
+    // If the algorithm is sha512, then it is the strongest
+    // and we can break the loop immediately
+    if (metadata.algo[3] === '5') {
+      algorithm = 'sha512'
+      break
+    // If the algorithm is sha384, then a potential sha256 or sha384 is ignored
+    } else if (algorithm[3] === '3') {
+      continue
+    // algorithm is sha256, check if algorithm is sha384 and if so, set it as
+    // the strongest
+    } else if (metadata.algo[3] === '3') {
+      algorithm = 'sha384'
+    }
+  }
+  return algorithm
+}
+
+function filterMetadataListByAlgorithm (metadataList, algorithm) {
+  if (metadataList.length === 1) {
+    return metadataList
+  }
+
+  let pos = 0
+  for (let i = 0; i < metadataList.length; ++i) {
+    if (metadataList[i].algo === algorithm) {
+      metadataList[pos++] = metadataList[i]
+    }
+  }
+
+  metadataList.length = pos
+
+  return metadataList
+}
+
+/**
+ * Compares two base64 strings, allowing for base64url
+ * in the second string.
+ *
+ * @param {string} actualValue always base64
+ * @param {string} expectedValue base64 or base64url
+ * @returns {boolean}
+ */
+function compareBase64Mixed (actualValue, expectedValue) {
+  if (actualValue.length !== expectedValue.length) {
+    return false
+  }
+  for (let i = 0; i < actualValue.length; ++i) {
+    if (actualValue[i] !== expectedValue[i]) {
+      if (
+        (actualValue[i] === '+' && expectedValue[i] === '-') ||
+        (actualValue[i] === '/' && expectedValue[i] === '_')
+      ) {
+        continue
+      }
+      return false
+    }
+  }
+
+  return true
+}
+
 // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
 function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
   // TODO
@@ -110004,7 +110218,8 @@ module.exports = {
   urlHasHttpsScheme,
   urlIsHttpHttpsScheme,
   readAllBytes,
-  normalizeMethodRecord
+  normalizeMethodRecord,
+  parseMetadata
 }
 
 
|
|||||||
|
|
||||||
// https://tools.ietf.org/html/rfc7231#section-6.4.4
|
// https://tools.ietf.org/html/rfc7231#section-6.4.4
|
||||||
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
|
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
|
||||||
return (
|
if (header.length === 4) {
|
||||||
(header.length === 4 && header.toString().toLowerCase() === 'host') ||
|
return util.headerNameToString(header) === 'host'
|
||||||
(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
|
}
|
||||||
(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
|
if (removeContent && util.headerNameToString(header).startsWith('content-')) {
|
||||||
(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
|
return true
|
||||||
)
|
}
|
||||||
|
if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
|
||||||
|
const name = util.headerNameToString(header)
|
||||||
|
return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
|
||||||
|
}
|
||||||
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// https://tools.ietf.org/html/rfc7231#section-6.4
|
// https://tools.ietf.org/html/rfc7231#section-6.4
|
||||||
@ -125582,11 +125802,12 @@ const fs_1 = __nccwpck_require__(57147);
|
|||||||
const path_1 = __nccwpck_require__(71017);
|
const path_1 = __nccwpck_require__(71017);
|
||||||
const util_1 = __nccwpck_require__(73837);
|
const util_1 = __nccwpck_require__(73837);
|
||||||
const stats = (0, util_1.promisify)(fs_1.stat);
|
const stats = (0, util_1.promisify)(fs_1.stat);
|
||||||
function getDefaultGlobOptions() {
|
function getDefaultGlobOptions(includeHiddenFiles) {
|
||||||
return {
|
return {
|
||||||
followSymbolicLinks: true,
|
followSymbolicLinks: true,
|
||||||
implicitDescendants: true,
|
implicitDescendants: true,
|
||||||
omitBrokenSymbolicLinks: true
|
omitBrokenSymbolicLinks: true,
|
||||||
|
excludeHiddenFiles: !includeHiddenFiles
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
@ -125640,10 +125861,10 @@ function getMultiPathLCA(searchPaths) {
|
|||||||
}
|
}
|
||||||
return path.join(...commonPaths);
|
return path.join(...commonPaths);
|
||||||
}
|
}
|
||||||
function findFilesToUpload(searchPath, globOptions) {
|
function findFilesToUpload(searchPath, includeHiddenFiles) {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
const searchResults = [];
|
const searchResults = [];
|
||||||
const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions());
|
const globber = yield glob.create(searchPath, getDefaultGlobOptions(includeHiddenFiles || false));
|
||||||
const rawSearchResults = yield globber.glob();
|
const rawSearchResults = yield globber.glob();
|
||||||
/*
|
/*
|
||||||
Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten
|
Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten
|
||||||
@ -125781,6 +126002,7 @@ var Inputs;
|
|||||||
Inputs["RetentionDays"] = "retention-days";
|
Inputs["RetentionDays"] = "retention-days";
|
||||||
Inputs["CompressionLevel"] = "compression-level";
|
Inputs["CompressionLevel"] = "compression-level";
|
||||||
Inputs["Overwrite"] = "overwrite";
|
Inputs["Overwrite"] = "overwrite";
|
||||||
|
Inputs["IncludeHiddenFiles"] = "include-hidden-files";
|
||||||
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
})(Inputs = exports.Inputs || (exports.Inputs = {}));
|
||||||
var NoFileOptions;
|
var NoFileOptions;
|
||||||
(function (NoFileOptions) {
|
(function (NoFileOptions) {
|
||||||
@ -125878,6 +126100,7 @@ function getInputs() {
|
|||||||
const name = core.getInput(constants_1.Inputs.Name);
|
const name = core.getInput(constants_1.Inputs.Name);
|
||||||
const path = core.getInput(constants_1.Inputs.Path, { required: true });
|
const path = core.getInput(constants_1.Inputs.Path, { required: true });
|
||||||
const overwrite = core.getBooleanInput(constants_1.Inputs.Overwrite);
|
const overwrite = core.getBooleanInput(constants_1.Inputs.Overwrite);
|
||||||
|
const includeHiddenFiles = core.getBooleanInput(constants_1.Inputs.IncludeHiddenFiles);
|
||||||
const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound);
|
const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound);
|
||||||
const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound];
|
const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound];
|
||||||
if (!noFileBehavior) {
|
if (!noFileBehavior) {
|
||||||
@ -125887,7 +126110,8 @@ function getInputs() {
|
|||||||
artifactName: name,
|
artifactName: name,
|
||||||
searchPath: path,
|
searchPath: path,
|
||||||
ifNoFilesFound: noFileBehavior,
|
ifNoFilesFound: noFileBehavior,
|
||||||
overwrite: overwrite
|
overwrite: overwrite,
|
||||||
|
includeHiddenFiles: includeHiddenFiles
|
||||||
};
|
};
|
||||||
const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
|
const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
|
||||||
if (retentionDaysStr) {
|
if (retentionDaysStr) {
|
||||||
@ -125976,7 +126200,7 @@ function deleteArtifactIfExists(artifactName) {
|
|||||||
function run() {
|
function run() {
|
||||||
return __awaiter(this, void 0, void 0, function* () {
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
const inputs = (0, input_helper_1.getInputs)();
|
const inputs = (0, input_helper_1.getInputs)();
|
||||||
const searchResult = yield (0, search_1.findFilesToUpload)(inputs.searchPath);
|
const searchResult = yield (0, search_1.findFilesToUpload)(inputs.searchPath, inputs.includeHiddenFiles);
|
||||||
if (searchResult.filesToUpload.length === 0) {
|
if (searchResult.filesToUpload.length === 0) {
|
||||||
// No files were found, different use cases warrant different types of behavior if nothing is found
|
// No files were found, different use cases warrant different types of behavior if nothing is found
|
||||||
switch (inputs.ifNoFilesFound) {
|
switch (inputs.ifNoFilesFound) {
|
||||||
@ -135987,7 +136211,7 @@ module.exports = index;
|
|||||||
/***/ ((module) => {
|
/***/ ((module) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.6","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
|
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
|
||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
|
@@ -4,6 +4,7 @@
 - [Multiple uploads to the same named Artifact](#multiple-uploads-to-the-same-named-artifact)
 - [Overwriting an Artifact](#overwriting-an-artifact)
 - [Merging multiple artifacts](#merging-multiple-artifacts)
+- [Hidden files](#hidden-files)
 
 Several behavioral differences exist between Artifact actions `v3` and below vs `v4`. This document outlines common scenarios in `v3`, and how they would be handled in `v4`.
 
|
|||||||
```
|
```
|
||||||
|
|
||||||
Note that this will download all artifacts to a temporary directory and reupload them as a single artifact. For more information on inputs and other use cases for `actions/upload-artifact/merge@v4`, see [the action documentation](../merge/README.md).
|
Note that this will download all artifacts to a temporary directory and reupload them as a single artifact. For more information on inputs and other use cases for `actions/upload-artifact/merge@v4`, see [the action documentation](../merge/README.md).
|
||||||
|
|
||||||
|
## Hidden Files
|
||||||
|
|
||||||
|
By default, hidden files are ignored by this action to avoid unintentionally uploading sensitive
|
||||||
|
information.
|
||||||
|
|
||||||
|
In versions of this action before v4.4.0, these hidden files were included by default.
|
||||||
|
|
||||||
|
If you need to upload hidden files, you can use the `include-hidden-files` input.
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
jobs:
|
||||||
|
upload:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Create a Hidden File
|
||||||
|
run: echo "hello from a hidden file" > .hidden-file.txt
|
||||||
|
- name: Upload Artifact
|
||||||
|
uses: actions/upload-artifact@v3
|
||||||
|
with:
|
||||||
|
path: .hidden-file.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
```diff
|
||||||
|
jobs:
|
||||||
|
upload:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Create a Hidden File
|
||||||
|
run: echo "hello from a hidden file" > .hidden-file.txt
|
||||||
|
- name: Upload Artifact
|
||||||
|
- uses: actions/upload-artifact@v3
|
||||||
|
+ uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
path: .hidden-file.txt
|
||||||
|
+ include-hidden-files: true
|
||||||
|
```
|
@ -36,6 +36,11 @@ inputs:
|
|||||||
If true, the artifacts that were merged will be deleted.
|
If true, the artifacts that were merged will be deleted.
|
||||||
If false, the artifacts will still exist.
|
If false, the artifacts will still exist.
|
||||||
default: 'false'
|
default: 'false'
|
||||||
|
include-hidden-files:
|
||||||
|
description: >
|
||||||
|
If true, hidden files will be included in the merged artifact.
|
||||||
|
If false, hidden files will be excluded from the merged artifact.
|
||||||
|
default: 'false'
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
artifact-id:
|
artifact-id:
|
||||||
|
76
package-lock.json
generated
76
package-lock.json
generated
@@ -1,18 +1,18 @@
 {
   "name": "upload-artifact",
-  "version": "4.3.3",
+  "version": "4.4.0",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "upload-artifact",
-      "version": "4.3.3",
+      "version": "4.4.0",
       "license": "MIT",
       "dependencies": {
-        "@actions/artifact": "^2.1.6",
+        "@actions/artifact": "2.1.8",
         "@actions/core": "^1.10.1",
         "@actions/github": "^6.0.0",
-        "@actions/glob": "^0.3.0",
+        "@actions/glob": "^0.5.0",
         "@actions/io": "^1.1.2",
         "minimatch": "^9.0.3"
       },
@@ -34,9 +34,9 @@
       }
     },
     "node_modules/@actions/artifact": {
-      "version": "2.1.6",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.6.tgz",
-      "integrity": "sha512-IYdauOIXyCMsGaNlqiTkNHlPwWOmRiJTMLOlLZY/S8mnWbkxXprlks3aV2a4PHeDEHg4MVC6+2y8rtVnEG5uPA==",
+      "version": "2.1.8",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
+      "integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/github": "^5.1.1",
@@ -191,11 +191,11 @@
       }
     },
     "node_modules/@actions/glob": {
-      "version": "0.3.0",
-      "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.3.0.tgz",
-      "integrity": "sha512-tJP1ZhF87fd6LBnaXWlahkyvdgvsLl7WnreW1EZaC8JWjpMXmzqWzQVe/IEYslrkT9ymibVrKyJN4UMD7uQM2w==",
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.5.0.tgz",
+      "integrity": "sha512-tST2rjPvJLRZLuT9NMUtyBjvj9Yo0MiJS3ow004slMvm8GFM+Zv9HvMJ7HWzfUyJnGrJvDsYkWBaaG3YKXRtCw==",
       "dependencies": {
-        "@actions/core": "^1.2.6",
+        "@actions/core": "^1.9.1",
         "minimatch": "^3.0.4"
       }
     },
@@ -2838,12 +2838,12 @@
       }
     },
     "node_modules/braces": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
-      "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
       "dev": true,
       "dependencies": {
-        "fill-range": "^7.0.1"
+        "fill-range": "^7.1.1"
       },
       "engines": {
         "node": ">=8"
@@ -4275,9 +4275,9 @@
       }
     },
     "node_modules/fill-range": {
-      "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
-      "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
       "dev": true,
       "dependencies": {
         "to-regex-range": "^5.0.1"
@@ -7584,9 +7584,9 @@
       }
     },
     "node_modules/undici": {
-      "version": "5.28.2",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz",
-      "integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==",
+      "version": "5.28.4",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
+      "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
       "dependencies": {
         "@fastify/busboy": "^2.0.0"
       },
@@ -7902,9 +7902,9 @@
   },
   "dependencies": {
     "@actions/artifact": {
-      "version": "2.1.6",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.6.tgz",
-      "integrity": "sha512-IYdauOIXyCMsGaNlqiTkNHlPwWOmRiJTMLOlLZY/S8mnWbkxXprlks3aV2a4PHeDEHg4MVC6+2y8rtVnEG5uPA==",
+      "version": "2.1.8",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
+      "integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
       "requires": {
         "@actions/core": "^1.10.0",
         "@actions/github": "^5.1.1",
@@ -8036,11 +8036,11 @@
       }
     },
     "@actions/glob": {
-      "version": "0.3.0",
-      "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.3.0.tgz",
-      "integrity": "sha512-tJP1ZhF87fd6LBnaXWlahkyvdgvsLl7WnreW1EZaC8JWjpMXmzqWzQVe/IEYslrkT9ymibVrKyJN4UMD7uQM2w==",
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@actions/glob/-/glob-0.5.0.tgz",
+      "integrity": "sha512-tST2rjPvJLRZLuT9NMUtyBjvj9Yo0MiJS3ow004slMvm8GFM+Zv9HvMJ7HWzfUyJnGrJvDsYkWBaaG3YKXRtCw==",
       "requires": {
-        "@actions/core": "^1.2.6",
+        "@actions/core": "^1.9.1",
         "minimatch": "^3.0.4"
       },
       "dependencies": {
@@ -10102,12 +10102,12 @@
       }
     },
     "braces": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
-      "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
       "dev": true,
       "requires": {
-        "fill-range": "^7.0.1"
+        "fill-range": "^7.1.1"
       }
     },
     "browserslist": {
@@ -11196,9 +11196,9 @@
       }
     },
     "fill-range": {
-      "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
-      "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
       "dev": true,
       "requires": {
         "to-regex-range": "^5.0.1"
@@ -13638,9 +13638,9 @@
       }
     },
     "undici": {
-      "version": "5.28.2",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz",
-      "integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==",
+      "version": "5.28.4",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
+      "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
       "requires": {
         "@fastify/busboy": "^2.0.0"
       }
@@ -1,6 +1,6 @@
 {
   "name": "upload-artifact",
-  "version": "4.3.3",
+  "version": "4.4.0",
   "description": "Upload an Actions Artifact in a workflow run",
   "main": "dist/upload/index.js",
   "scripts": {
@@ -29,10 +29,10 @@
   },
   "homepage": "https://github.com/actions/upload-artifact#readme",
   "dependencies": {
-    "@actions/artifact": "^2.1.6",
+    "@actions/artifact": "2.1.8",
     "@actions/core": "^1.10.1",
     "@actions/github": "^6.0.0",
-    "@actions/glob": "^0.3.0",
+    "@actions/glob": "^0.5.0",
     "@actions/io": "^1.1.2",
     "minimatch": "^9.0.3"
   },
@@ -5,5 +5,6 @@ export enum Inputs {
   SeparateDirectories = 'separate-directories',
   RetentionDays = 'retention-days',
   CompressionLevel = 'compression-level',
-  DeleteMerged = 'delete-merged'
+  DeleteMerged = 'delete-merged',
+  IncludeHiddenFiles = 'include-hidden-files'
 }
@@ -10,6 +10,7 @@ export function getInputs(): MergeInputs {
   const pattern = core.getInput(Inputs.Pattern, {required: true})
   const separateDirectories = core.getBooleanInput(Inputs.SeparateDirectories)
   const deleteMerged = core.getBooleanInput(Inputs.DeleteMerged)
+  const includeHiddenFiles = core.getBooleanInput(Inputs.IncludeHiddenFiles)
 
   const inputs = {
     name,
@@ -17,7 +18,8 @@ export function getInputs(): MergeInputs {
     separateDirectories,
     deleteMerged,
     retentionDays: 0,
-    compressionLevel: 6
+    compressionLevel: 6,
+    includeHiddenFiles
   } as MergeInputs
 
   const retentionDaysStr = core.getInput(Inputs.RetentionDays)
@@ -62,7 +62,10 @@ export async function run(): Promise<void> {
     options.compressionLevel = inputs.compressionLevel
   }
 
-  const searchResult = await findFilesToUpload(tmpDir)
+  const searchResult = await findFilesToUpload(
+    tmpDir,
+    inputs.includeHiddenFiles
+  )
 
   await uploadArtifact(
     inputs.name,
@@ -30,4 +30,9 @@ export interface MergeInputs {
    * If false, the artifacts will be merged into the root of the destination.
    */
   separateDirectories: boolean
+
+  /**
+   * Whether or not to include hidden files in the artifact
+   */
+  includeHiddenFiles: boolean
 }
@@ -11,11 +11,12 @@ export interface SearchResult {
   rootDirectory: string
 }
 
-function getDefaultGlobOptions(): glob.GlobOptions {
+function getDefaultGlobOptions(includeHiddenFiles: boolean): glob.GlobOptions {
   return {
     followSymbolicLinks: true,
     implicitDescendants: true,
-    omitBrokenSymbolicLinks: true
+    omitBrokenSymbolicLinks: true,
+    excludeHiddenFiles: !includeHiddenFiles
   }
 }
 
@@ -80,12 +81,12 @@ function getMultiPathLCA(searchPaths: string[]): string {
 
 export async function findFilesToUpload(
   searchPath: string,
-  globOptions?: glob.GlobOptions
+  includeHiddenFiles?: boolean
 ): Promise<SearchResult> {
   const searchResults: string[] = []
   const globber = await glob.create(
     searchPath,
-    globOptions || getDefaultGlobOptions()
+    getDefaultGlobOptions(includeHiddenFiles || false)
   )
   const rawSearchResults: string[] = await globber.glob()
 
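The `search.ts` change above is the heart of the new behavior: `excludeHiddenFiles` is simply the inverse of the `include-hidden-files` input, passed straight into `@actions/glob`. A small TypeScript sketch of the same wiring (the wrapper function name here is made up for illustration):

```typescript
import * as glob from '@actions/glob'

// Hypothetical wrapper mirroring the glob options built in search.ts.
async function listFilesToUpload(searchPath: string, includeHiddenFiles = false): Promise<string[]> {
  const globber = await glob.create(searchPath, {
    followSymbolicLinks: true,
    implicitDescendants: true,
    omitBrokenSymbolicLinks: true,
    excludeHiddenFiles: !includeHiddenFiles // hidden files are skipped by default
  })
  return await globber.glob()
}

// With the default, a file such as .hidden-file.txt would not be returned.
listFilesToUpload('**/*.txt').then(files => console.log(files.length))
```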
@@ -5,7 +5,8 @@ export enum Inputs {
   IfNoFilesFound = 'if-no-files-found',
   RetentionDays = 'retention-days',
   CompressionLevel = 'compression-level',
-  Overwrite = 'overwrite'
+  Overwrite = 'overwrite',
+  IncludeHiddenFiles = 'include-hidden-files'
 }
 
 export enum NoFileOptions {
@@ -9,6 +9,7 @@ export function getInputs(): UploadInputs {
   const name = core.getInput(Inputs.Name)
   const path = core.getInput(Inputs.Path, {required: true})
   const overwrite = core.getBooleanInput(Inputs.Overwrite)
+  const includeHiddenFiles = core.getBooleanInput(Inputs.IncludeHiddenFiles)
 
   const ifNoFilesFound = core.getInput(Inputs.IfNoFilesFound)
   const noFileBehavior: NoFileOptions = NoFileOptions[ifNoFilesFound]
@@ -27,7 +28,8 @@ export function getInputs(): UploadInputs {
     artifactName: name,
     searchPath: path,
     ifNoFilesFound: noFileBehavior,
-    overwrite: overwrite
+    overwrite: overwrite,
+    includeHiddenFiles: includeHiddenFiles
   } as UploadInputs
 
   const retentionDaysStr = core.getInput(Inputs.RetentionDays)
@@ -24,7 +24,10 @@ async function deleteArtifactIfExists(artifactName: string): Promise<void> {
 
 export async function run(): Promise<void> {
   const inputs = getInputs()
-  const searchResult = await findFilesToUpload(inputs.searchPath)
+  const searchResult = await findFilesToUpload(
+    inputs.searchPath,
+    inputs.includeHiddenFiles
+  )
   if (searchResult.filesToUpload.length === 0) {
     // No files were found, different use cases warrant different types of behavior if nothing is found
     switch (inputs.ifNoFilesFound) {
@@ -30,4 +30,9 @@ export interface UploadInputs {
    * Whether or not to replace an existing artifact with the same name
    */
   overwrite: boolean
+
+  /**
+   * Whether or not to include hidden files in the artifact
+   */
+  includeHiddenFiles: boolean
 }