Compare commits

...

19 Commits

SHA1 Message Date
f626d210e8 linting 2024-08-15 20:37:17 -04:00
fa37431cef npm ci and npm run release 2024-08-15 20:34:01 -04:00
3412bb46a4 Exclude the .git directory by default 2024-08-15 20:29:20 -04:00
834a144ee9 Merge pull request #594 from actions/robherley/4.3.6
Revert to @actions/artifact 2.1.8
2024-08-06 10:41:08 -04:00
134dcf33c0 v4.3.6 2024-08-06 10:24:34 -04:00
73a0b9c954 revert back to @actions/artifact 2.1.8 2024-08-06 10:23:43 -04:00
89ef406dd8 Merge pull request #588 from actions/robherley/4.3.5
Bump @actions/artifact to v2.1.9
2024-08-02 09:59:06 -04:00
23d796df36 license updates 2024-08-01 15:50:32 -04:00
e445c64bc2 bump @actions/artifact to v2.1.9 2024-08-01 15:46:49 -04:00
0b2256b8c0 Merge pull request #584 from actions/robherley/bump-pkgs
Update @actions/artifact version, bump dependencies
2024-07-05 11:11:13 -04:00
488dcefb9b licensed cache 2024-07-05 14:39:15 +00:00
04c51f5766 ncc 2024-07-05 13:55:51 +00:00
32a9e276a8 bump @actions/artifact and npm audit 2024-07-05 13:51:02 +00:00
552bf3722c new version 2024-04-23 11:22:42 -07:00
79616d2ded Merge pull request #565 from actions/eggyhead/use-artifact-v2.1.6
updating toolkit artifact dependency to 2.1.6
2024-04-22 09:19:03 -07:00
65462800fd updating package version 2024-04-22 08:10:47 -07:00
c004fb4bf6 Merge branch 'main' into eggyhead/use-artifact-v2.1.6 2024-04-22 08:08:40 -07:00
90aba496fc updating toolkit artifact dependency to 2.1.6 2024-04-22 08:06:58 -07:00
b06cde36fc Merge pull request #563 from actions/eggyhead/release-4.3.2
updating to release 4.3.2
2024-04-18 08:13:48 -07:00
20 changed files with 811 additions and 235 deletions

View File

@ -1,6 +1,6 @@
---
name: "@actions/artifact"
version: 2.1.5
version: 2.1.8
type: npm
summary:
homepage:

View File

@ -16,6 +16,7 @@ See also [download-artifact](https://github.com/actions/download-artifact).
- [Breaking Changes](#breaking-changes)
- [Usage](#usage)
- [Inputs](#inputs)
- [Uploading the `.git` directory](#uploading-the-git-directory)
- [Outputs](#outputs)
- [Examples](#examples)
- [Upload an Individual File](#upload-an-individual-file)
@ -64,6 +65,7 @@ There is also a new sub-action, `actions/upload-artifact/merge`. For more info,
Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names (one approach is sketched below), or only upload once. Otherwise you _will_ encounter an error.
3. Limit of Artifacts for an individual job. Each job in a workflow run now has a limit of 500 artifacts.
4. With `v4.4` and later, the `.git` directory is excluded by default.
For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
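A minimal sketch of the split-upload pattern from breaking change 2, assuming a matrix build; the job layout and the artifact name `build-output-${{ matrix.os }}` are illustrative and not part of this change:

```yaml
jobs:
  build:
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest]
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/upload-artifact@v4
        with:
          # A unique name per matrix leg avoids uploading to the same named Artifact twice
          name: build-output-${{ matrix.os }}
          path: .
```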
@ -109,6 +111,30 @@ For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
overwrite:
```
#### Uploading the `.git` directory
By default, files in a `.git` directory are ignored in the uploaded artifact.
This is intended to prevent accidentally uploading Git credentials into an artifact that could then
be extracted.
If files in the `.git` directory are needed, ensure that `actions/checkout` is being used with
`persist-credentials: false`.
```yaml
jobs:
upload:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false # Ensure credentials are not saved in `.git/config`
- uses: actions/upload-artifact@v4
with:
path: .
include-git-directory: true
```
### Outputs
| Name | Description | Example |

View File

@ -61,6 +61,12 @@ const lonelyFilePath = path.join(
'lonely-file.txt'
)
const gitConfigPath = path.join(root, '.git', 'config')
const gitHeadPath = path.join(root, '.git', 'HEAD')
const nestedGitConfigPath = path.join(root, 'repository-name', '.git', 'config')
const nestedGitHeadPath = path.join(root, 'repository-name', '.git', 'HEAD')
describe('Search', () => {
beforeAll(async () => {
// mock all output so that there is less noise when running tests
@ -93,6 +99,11 @@ describe('Search', () => {
recursive: true
})
await fs.mkdir(path.join(root, '.git'))
await fs.mkdir(path.join(root, 'repository-name', '.git'), {
recursive: true
})
await fs.writeFile(searchItem1Path, 'search item1 file')
await fs.writeFile(searchItem2Path, 'search item2 file')
await fs.writeFile(searchItem3Path, 'search item3 file')
@ -110,9 +121,17 @@ describe('Search', () => {
await fs.writeFile(amazingFileInFolderHPath, 'amazing file')
await fs.writeFile(lonelyFilePath, 'all by itself')
await fs.writeFile(gitConfigPath, 'git config file')
await fs.writeFile(gitHeadPath, 'git head file')
await fs.writeFile(nestedGitConfigPath, 'nested git config file')
await fs.writeFile(nestedGitHeadPath, 'nested git head file')
/*
Directory structure of files that get created:
root/
.git/
config
HEAD
folder-a/
folder-b/
folder-c/
@ -136,6 +155,10 @@ describe('Search', () => {
folder-j/
folder-k/
lonely-file.txt
repository-name/
.git/
config
HEAD
search-item5.txt
*/
})
@ -352,4 +375,18 @@ describe('Search', () => {
)
expect(searchResult.filesToUpload.includes(lonelyFilePath)).toEqual(true)
})
it('Excludes .git directory by default', async () => {
const searchResult = await findFilesToUpload(root)
expect(searchResult.filesToUpload.length).toEqual(13)
expect(searchResult.filesToUpload).not.toContain(gitConfigPath)
})
it('Includes .git directory when includeGitDirectory is true', async () => {
const searchResult = await findFilesToUpload(root, {
includeGitDirectory: true
})
expect(searchResult.filesToUpload.length).toEqual(17)
expect(searchResult.filesToUpload).toContain(gitConfigPath)
})
})

View File

@ -40,6 +40,9 @@ inputs:
If false, the action will fail if an artifact for the given name already exists.
Does not fail if the artifact does not exist.
default: 'false'
include-git-directory:
description: 'Include files in the .git directory in the artifact.'
default: 'false'
outputs:
artifact-id:

dist/merge/index.js (vendored, 385 changed lines)
View File

@ -2328,9 +2328,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
const promises_1 = __importDefault(__nccwpck_require__(73292));
const stream = __importStar(__nccwpck_require__(12781));
const fs_1 = __nccwpck_require__(57147);
const path = __importStar(__nccwpck_require__(71017));
const github = __importStar(__nccwpck_require__(21260));
const core = __importStar(__nccwpck_require__(42186));
const httpClient = __importStar(__nccwpck_require__(96255));
@ -2371,9 +2368,6 @@ function streamExtract(url, directory) {
return;
}
catch (error) {
if (error.message.includes('Malformed extraction path')) {
throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
}
retryCount++;
core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
// wait 5 seconds before retrying
@ -2396,8 +2390,6 @@ function streamExtractExternal(url, directory) {
response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
};
const timer = setTimeout(timerFn, timeout);
const createdDirectories = new Set();
createdDirectories.add(directory);
response.message
.on('data', () => {
timer.refresh();
@ -2407,47 +2399,11 @@ function streamExtractExternal(url, directory) {
clearTimeout(timer);
reject(error);
})
.pipe(unzip_stream_1.default.Parse())
.pipe(new stream.Transform({
objectMode: true,
transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
const fullPath = path.normalize(path.join(directory, entry.path));
if (!directory.endsWith(path.sep)) {
directory += path.sep;
}
if (!fullPath.startsWith(directory)) {
reject(new Error(`Malformed extraction path: ${fullPath}`));
}
if (entry.type === 'Directory') {
if (!createdDirectories.has(fullPath)) {
createdDirectories.add(fullPath);
yield resolveOrCreateDirectory(fullPath).then(() => {
entry.autodrain();
callback();
});
}
else {
entry.autodrain();
callback();
}
}
else {
core.info(`Extracting artifact entry: ${fullPath}`);
if (!createdDirectories.has(path.dirname(fullPath))) {
createdDirectories.add(path.dirname(fullPath));
yield resolveOrCreateDirectory(path.dirname(fullPath));
}
const writeStream = (0, fs_1.createWriteStream)(fullPath);
writeStream.on('finish', callback);
writeStream.on('error', reject);
entry.pipe(writeStream);
}
})
}))
.on('finish', () => __awaiter(this, void 0, void 0, function* () {
.pipe(unzip_stream_1.default.Extract({ path: directory }))
.on('close', () => {
clearTimeout(timer);
resolve();
}))
})
.on('error', (error) => {
reject(error);
});
@ -2964,7 +2920,6 @@ class ArtifactHttpClient {
catch (error) {
if (error instanceof SyntaxError) {
(0, core_1.debug)(`Raw Body: ${rawBody}`);
throw error;
}
if (error instanceof errors_1.UsageError) {
throw error;
@ -3070,8 +3025,9 @@ function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
return !isGitHubHost && !isGheHost;
const isGheHost = hostname.endsWith('.GHE.COM');
const isLocalHost = hostname.endsWith('.LOCALHOST');
return !isGitHubHost && !isGheHost && !isLocalHost;
}
exports.isGhes = isGhes;
function getGitHubWorkspaceDir() {
@ -100989,6 +100945,132 @@ function onConnectTimeout (socket) {
module.exports = buildConnector
/***/ }),
/***/ 14462:
/***/ ((module) => {
"use strict";
/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}
// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
'Accept',
'Accept-Encoding',
'Accept-Language',
'Accept-Ranges',
'Access-Control-Allow-Credentials',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Access-Control-Expose-Headers',
'Access-Control-Max-Age',
'Access-Control-Request-Headers',
'Access-Control-Request-Method',
'Age',
'Allow',
'Alt-Svc',
'Alt-Used',
'Authorization',
'Cache-Control',
'Clear-Site-Data',
'Connection',
'Content-Disposition',
'Content-Encoding',
'Content-Language',
'Content-Length',
'Content-Location',
'Content-Range',
'Content-Security-Policy',
'Content-Security-Policy-Report-Only',
'Content-Type',
'Cookie',
'Cross-Origin-Embedder-Policy',
'Cross-Origin-Opener-Policy',
'Cross-Origin-Resource-Policy',
'Date',
'Device-Memory',
'Downlink',
'ECT',
'ETag',
'Expect',
'Expect-CT',
'Expires',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Keep-Alive',
'Last-Modified',
'Link',
'Location',
'Max-Forwards',
'Origin',
'Permissions-Policy',
'Pragma',
'Proxy-Authenticate',
'Proxy-Authorization',
'RTT',
'Range',
'Referer',
'Referrer-Policy',
'Refresh',
'Retry-After',
'Sec-WebSocket-Accept',
'Sec-WebSocket-Extensions',
'Sec-WebSocket-Key',
'Sec-WebSocket-Protocol',
'Sec-WebSocket-Version',
'Server',
'Server-Timing',
'Service-Worker-Allowed',
'Service-Worker-Navigation-Preload',
'Set-Cookie',
'SourceMap',
'Strict-Transport-Security',
'Supports-Loading-Mode',
'TE',
'Timing-Allow-Origin',
'Trailer',
'Transfer-Encoding',
'Upgrade',
'Upgrade-Insecure-Requests',
'User-Agent',
'Vary',
'Via',
'WWW-Authenticate',
'X-Content-Type-Options',
'X-DNS-Prefetch-Control',
'X-Frame-Options',
'X-Permitted-Cross-Domain-Policies',
'X-Powered-By',
'X-Requested-With',
'X-XSS-Protection'
]
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = wellknownHeaderNames[i]
const lowerCasedKey = key.toLowerCase()
headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
lowerCasedKey
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
module.exports = {
wellknownHeaderNames,
headerNameLowerCasedRecord
}
/***/ }),
/***/ 48045:
@ -101821,6 +101903,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045)
const { Blob } = __nccwpck_require__(14300)
const nodeUtil = __nccwpck_require__(73837)
const { stringify } = __nccwpck_require__(63477)
const { headerNameLowerCasedRecord } = __nccwpck_require__(14462)
const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@ -102030,6 +102113,15 @@ function parseKeepAliveTimeout (val) {
return m ? parseInt(m[1], 10) * 1000 : null
}
/**
* Retrieves a header name and returns its lowercase value.
* @param {string | Buffer} value Header name
* @returns {string}
*/
function headerNameToString (value) {
return headerNameLowerCasedRecord[value] || value.toLowerCase()
}
function parseHeaders (headers, obj = {}) {
// For H2 support
if (!Array.isArray(headers)) return headers
@ -102301,6 +102393,7 @@ module.exports = {
isIterable,
isAsyncIterable,
isDestroyed,
headerNameToString,
parseRawHeaders,
parseHeaders,
parseKeepAliveTimeout,
@ -106437,6 +106530,9 @@ function httpRedirectFetch (fetchParams, response) {
// https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
request.headersList.delete('authorization')
// https://fetch.spec.whatwg.org/#authentication-entries
request.headersList.delete('proxy-authorization', true)
// "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
request.headersList.delete('cookie')
request.headersList.delete('host')
@ -108945,14 +109041,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398
const assert = __nccwpck_require__(39491)
const { isUint8Array } = __nccwpck_require__(29830)
let supportedHashes = []
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */
let crypto
try {
crypto = __nccwpck_require__(6113)
const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
/* c8 ignore next 3 */
} catch {
}
function responseURL (response) {
@ -109480,66 +109580,56 @@ function bytesMatch (bytes, metadataList) {
return true
}
// 3. If parsedMetadata is the empty set, return true.
// 3. If response is not eligible for integrity validation, return false.
// TODO
// 4. If parsedMetadata is the empty set, return true.
if (parsedMetadata.length === 0) {
return true
}
// 4. Let metadata be the result of getting the strongest
// 5. Let metadata be the result of getting the strongest
// metadata from parsedMetadata.
const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
// get the strongest algorithm
const strongest = list[0].algo
// get all entries that use the strongest algorithm; ignore weaker
const metadata = list.filter((item) => item.algo === strongest)
const strongest = getStrongestMetadata(parsedMetadata)
const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
// 5. For each item in metadata:
// 6. For each item in metadata:
for (const item of metadata) {
// 1. Let algorithm be the alg component of item.
const algorithm = item.algo
// 2. Let expectedValue be the val component of item.
let expectedValue = item.hash
const expectedValue = item.hash
// See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
// "be liberal with padding". This is annoying, and it's not even in the spec.
if (expectedValue.endsWith('==')) {
expectedValue = expectedValue.slice(0, -2)
}
// 3. Let actualValue be the result of applying algorithm to bytes.
let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
if (actualValue.endsWith('==')) {
actualValue = actualValue.slice(0, -2)
if (actualValue[actualValue.length - 1] === '=') {
if (actualValue[actualValue.length - 2] === '=') {
actualValue = actualValue.slice(0, -2)
} else {
actualValue = actualValue.slice(0, -1)
}
}
// 4. If actualValue is a case-sensitive match for expectedValue,
// return true.
if (actualValue === expectedValue) {
return true
}
let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
if (actualBase64URL.endsWith('==')) {
actualBase64URL = actualBase64URL.slice(0, -2)
}
if (actualBase64URL === expectedValue) {
if (compareBase64Mixed(actualValue, expectedValue)) {
return true
}
}
// 6. Return false.
// 7. Return false.
return false
}
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
/**
* @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@ -109553,8 +109643,6 @@ function parseMetadata (metadata) {
// 2. Let empty be equal to true.
let empty = true
const supportedHashes = crypto.getHashes()
// 3. For each token returned by splitting metadata on spaces:
for (const token of metadata.split(' ')) {
// 1. Set empty to false.
@ -109564,7 +109652,11 @@ function parseMetadata (metadata) {
const parsedToken = parseHashWithOptions.exec(token)
// 3. If token does not parse, continue to the next token.
if (parsedToken === null || parsedToken.groups === undefined) {
if (
parsedToken === null ||
parsedToken.groups === undefined ||
parsedToken.groups.algo === undefined
) {
// Note: Chromium blocks the request at this point, but Firefox
// gives a warning that an invalid integrity was given. The
// correct behavior is to ignore these, and subsequently not
@ -109573,11 +109665,11 @@ function parseMetadata (metadata) {
}
// 4. Let algorithm be the hash-algo component of token.
const algorithm = parsedToken.groups.algo
const algorithm = parsedToken.groups.algo.toLowerCase()
// 5. If algorithm is a hash function recognized by the user
// agent, add the parsed token to result.
if (supportedHashes.includes(algorithm.toLowerCase())) {
if (supportedHashes.includes(algorithm)) {
result.push(parsedToken.groups)
}
}
@ -109590,6 +109682,82 @@ function parseMetadata (metadata) {
return result
}
/**
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
*/
function getStrongestMetadata (metadataList) {
// Let algorithm be the algo component of the first item in metadataList.
// Can be sha256
let algorithm = metadataList[0].algo
// If the algorithm is sha512, then it is the strongest
// and we can return immediately
if (algorithm[3] === '5') {
return algorithm
}
for (let i = 1; i < metadataList.length; ++i) {
const metadata = metadataList[i]
// If the algorithm is sha512, then it is the strongest
// and we can break the loop immediately
if (metadata.algo[3] === '5') {
algorithm = 'sha512'
break
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
} else if (algorithm[3] === '3') {
continue
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
// the strongest
} else if (metadata.algo[3] === '3') {
algorithm = 'sha384'
}
}
return algorithm
}
function filterMetadataListByAlgorithm (metadataList, algorithm) {
if (metadataList.length === 1) {
return metadataList
}
let pos = 0
for (let i = 0; i < metadataList.length; ++i) {
if (metadataList[i].algo === algorithm) {
metadataList[pos++] = metadataList[i]
}
}
metadataList.length = pos
return metadataList
}
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue always base64
* @param {string} expectedValue base64 or base64url
* @returns {boolean}
*/
function compareBase64Mixed (actualValue, expectedValue) {
if (actualValue.length !== expectedValue.length) {
return false
}
for (let i = 0; i < actualValue.length; ++i) {
if (actualValue[i] !== expectedValue[i]) {
if (
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
}
return true
}
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
// TODO
@ -110005,7 +110173,8 @@ module.exports = {
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes,
normalizeMethodRecord
normalizeMethodRecord,
parseMetadata
}
@ -112092,12 +112261,17 @@ function parseLocation (statusCode, headers) {
// https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
return (
(header.length === 4 && header.toString().toLowerCase() === 'host') ||
(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
)
if (header.length === 4) {
return util.headerNameToString(header) === 'host'
}
if (removeContent && util.headerNameToString(header).startsWith('content-')) {
return true
}
if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
const name = util.headerNameToString(header)
return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
}
return false
}
// https://tools.ietf.org/html/rfc7231#section-6.4
@ -125553,6 +125727,7 @@ var Inputs;
Inputs["RetentionDays"] = "retention-days";
Inputs["CompressionLevel"] = "compression-level";
Inputs["DeleteMerged"] = "delete-merged";
Inputs["IncludeGitDirectory"] = "include-git-directory";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
@ -125636,13 +125811,15 @@ function getInputs() {
const pattern = core.getInput(constants_1.Inputs.Pattern, { required: true });
const separateDirectories = core.getBooleanInput(constants_1.Inputs.SeparateDirectories);
const deleteMerged = core.getBooleanInput(constants_1.Inputs.DeleteMerged);
const includeGitDirectory = core.getBooleanInput(constants_1.Inputs.IncludeGitDirectory);
const inputs = {
name,
pattern,
separateDirectories,
deleteMerged,
retentionDays: 0,
compressionLevel: 6
compressionLevel: 6,
includeGitDirectory
};
const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
if (retentionDaysStr) {
@ -125758,7 +125935,9 @@ function run() {
if (typeof inputs.compressionLevel !== 'undefined') {
options.compressionLevel = inputs.compressionLevel;
}
const searchResult = yield (0, search_1.findFilesToUpload)(tmpDir);
const searchResult = yield (0, search_1.findFilesToUpload)(tmpDir, {
includeGitDirectory: inputs.includeGitDirectory
});
yield (0, upload_artifact_1.uploadArtifact)(inputs.name, searchResult.filesToUpload, searchResult.rootDirectory, options);
core.info(`The ${artifacts.length} artifact(s) have been successfully merged!`);
if (inputs.deleteMerged) {
@ -125883,10 +126062,10 @@ function getMultiPathLCA(searchPaths) {
}
return path.join(...commonPaths);
}
function findFilesToUpload(searchPath, globOptions) {
function findFilesToUpload(searchPath, searchOptions) {
return __awaiter(this, void 0, void 0, function* () {
const searchResults = [];
const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions());
const globber = yield glob.create(searchPath, getDefaultGlobOptions());
const rawSearchResults = yield globber.glob();
/*
Files are saved with case insensitivity. Uploading both a.txt and A.txt will cause files to be overwritten
@ -125902,6 +126081,10 @@ function findFilesToUpload(searchPath, globOptions) {
// isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
if (!fileStats.isDirectory()) {
(0, core_1.debug)(`File:${searchResult} was found using the provided searchPath`);
if (!(searchOptions === null || searchOptions === void 0 ? void 0 : searchOptions.includeGitDirectory) && inGitDirectory(searchResult)) {
(0, core_1.debug)(`Ignoring ${searchResult} because it is in the .git directory`);
continue;
}
searchResults.push(searchResult);
// detect any files that would be overwritten because of case insensitivity
if (set.has(searchResult.toLowerCase())) {
@ -125943,6 +126126,16 @@ function findFilesToUpload(searchPath, globOptions) {
});
}
exports.findFilesToUpload = findFilesToUpload;
function inGitDirectory(filePath) {
// Treat a file as inside the .git directory when any of its path segments is exactly '.git'
for (const part of filePath.split(path.sep)) {
if (part === '.git') {
return true;
}
}
return false;
}
/***/ }),
@ -135978,7 +136171,7 @@ module.exports = index;
/***/ ((module) => {
"use strict";
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.5","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
/***/ }),

dist/upload/index.js (vendored, 385 changed lines)
View File

@ -2328,9 +2328,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0;
const promises_1 = __importDefault(__nccwpck_require__(73292));
const stream = __importStar(__nccwpck_require__(12781));
const fs_1 = __nccwpck_require__(57147);
const path = __importStar(__nccwpck_require__(71017));
const github = __importStar(__nccwpck_require__(21260));
const core = __importStar(__nccwpck_require__(42186));
const httpClient = __importStar(__nccwpck_require__(96255));
@ -2371,9 +2368,6 @@ function streamExtract(url, directory) {
return;
}
catch (error) {
if (error.message.includes('Malformed extraction path')) {
throw new Error(`Artifact download failed with unretryable error: ${error.message}`);
}
retryCount++;
core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. Retrying in 5 seconds...`);
// wait 5 seconds before retrying
@ -2396,8 +2390,6 @@ function streamExtractExternal(url, directory) {
response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`));
};
const timer = setTimeout(timerFn, timeout);
const createdDirectories = new Set();
createdDirectories.add(directory);
response.message
.on('data', () => {
timer.refresh();
@ -2407,47 +2399,11 @@ function streamExtractExternal(url, directory) {
clearTimeout(timer);
reject(error);
})
.pipe(unzip_stream_1.default.Parse())
.pipe(new stream.Transform({
objectMode: true,
transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () {
const fullPath = path.normalize(path.join(directory, entry.path));
if (!directory.endsWith(path.sep)) {
directory += path.sep;
}
if (!fullPath.startsWith(directory)) {
reject(new Error(`Malformed extraction path: ${fullPath}`));
}
if (entry.type === 'Directory') {
if (!createdDirectories.has(fullPath)) {
createdDirectories.add(fullPath);
yield resolveOrCreateDirectory(fullPath).then(() => {
entry.autodrain();
callback();
});
}
else {
entry.autodrain();
callback();
}
}
else {
core.info(`Extracting artifact entry: ${fullPath}`);
if (!createdDirectories.has(path.dirname(fullPath))) {
createdDirectories.add(path.dirname(fullPath));
yield resolveOrCreateDirectory(path.dirname(fullPath));
}
const writeStream = (0, fs_1.createWriteStream)(fullPath);
writeStream.on('finish', callback);
writeStream.on('error', reject);
entry.pipe(writeStream);
}
})
}))
.on('finish', () => __awaiter(this, void 0, void 0, function* () {
.pipe(unzip_stream_1.default.Extract({ path: directory }))
.on('close', () => {
clearTimeout(timer);
resolve();
}))
})
.on('error', (error) => {
reject(error);
});
@ -2964,7 +2920,6 @@ class ArtifactHttpClient {
catch (error) {
if (error instanceof SyntaxError) {
(0, core_1.debug)(`Raw Body: ${rawBody}`);
throw error;
}
if (error instanceof errors_1.UsageError) {
throw error;
@ -3070,8 +3025,9 @@ function isGhes() {
const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com');
const hostname = ghUrl.hostname.trimEnd().toUpperCase();
const isGitHubHost = hostname === 'GITHUB.COM';
const isGheHost = hostname.endsWith('.GHE.COM') || hostname.endsWith('.GHE.LOCALHOST');
return !isGitHubHost && !isGheHost;
const isGheHost = hostname.endsWith('.GHE.COM');
const isLocalHost = hostname.endsWith('.LOCALHOST');
return !isGitHubHost && !isGheHost && !isLocalHost;
}
exports.isGhes = isGhes;
function getGitHubWorkspaceDir() {
@ -100989,6 +100945,132 @@ function onConnectTimeout (socket) {
module.exports = buildConnector
/***/ }),
/***/ 14462:
/***/ ((module) => {
"use strict";
/** @type {Record<string, string | undefined>} */
const headerNameLowerCasedRecord = {}
// https://developer.mozilla.org/docs/Web/HTTP/Headers
const wellknownHeaderNames = [
'Accept',
'Accept-Encoding',
'Accept-Language',
'Accept-Ranges',
'Access-Control-Allow-Credentials',
'Access-Control-Allow-Headers',
'Access-Control-Allow-Methods',
'Access-Control-Allow-Origin',
'Access-Control-Expose-Headers',
'Access-Control-Max-Age',
'Access-Control-Request-Headers',
'Access-Control-Request-Method',
'Age',
'Allow',
'Alt-Svc',
'Alt-Used',
'Authorization',
'Cache-Control',
'Clear-Site-Data',
'Connection',
'Content-Disposition',
'Content-Encoding',
'Content-Language',
'Content-Length',
'Content-Location',
'Content-Range',
'Content-Security-Policy',
'Content-Security-Policy-Report-Only',
'Content-Type',
'Cookie',
'Cross-Origin-Embedder-Policy',
'Cross-Origin-Opener-Policy',
'Cross-Origin-Resource-Policy',
'Date',
'Device-Memory',
'Downlink',
'ECT',
'ETag',
'Expect',
'Expect-CT',
'Expires',
'Forwarded',
'From',
'Host',
'If-Match',
'If-Modified-Since',
'If-None-Match',
'If-Range',
'If-Unmodified-Since',
'Keep-Alive',
'Last-Modified',
'Link',
'Location',
'Max-Forwards',
'Origin',
'Permissions-Policy',
'Pragma',
'Proxy-Authenticate',
'Proxy-Authorization',
'RTT',
'Range',
'Referer',
'Referrer-Policy',
'Refresh',
'Retry-After',
'Sec-WebSocket-Accept',
'Sec-WebSocket-Extensions',
'Sec-WebSocket-Key',
'Sec-WebSocket-Protocol',
'Sec-WebSocket-Version',
'Server',
'Server-Timing',
'Service-Worker-Allowed',
'Service-Worker-Navigation-Preload',
'Set-Cookie',
'SourceMap',
'Strict-Transport-Security',
'Supports-Loading-Mode',
'TE',
'Timing-Allow-Origin',
'Trailer',
'Transfer-Encoding',
'Upgrade',
'Upgrade-Insecure-Requests',
'User-Agent',
'Vary',
'Via',
'WWW-Authenticate',
'X-Content-Type-Options',
'X-DNS-Prefetch-Control',
'X-Frame-Options',
'X-Permitted-Cross-Domain-Policies',
'X-Powered-By',
'X-Requested-With',
'X-XSS-Protection'
]
for (let i = 0; i < wellknownHeaderNames.length; ++i) {
const key = wellknownHeaderNames[i]
const lowerCasedKey = key.toLowerCase()
headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] =
lowerCasedKey
}
// Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`.
Object.setPrototypeOf(headerNameLowerCasedRecord, null)
module.exports = {
wellknownHeaderNames,
headerNameLowerCasedRecord
}
/***/ }),
/***/ 48045:
@ -101821,6 +101903,7 @@ const { InvalidArgumentError } = __nccwpck_require__(48045)
const { Blob } = __nccwpck_require__(14300)
const nodeUtil = __nccwpck_require__(73837)
const { stringify } = __nccwpck_require__(63477)
const { headerNameLowerCasedRecord } = __nccwpck_require__(14462)
const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v))
@ -102030,6 +102113,15 @@ function parseKeepAliveTimeout (val) {
return m ? parseInt(m[1], 10) * 1000 : null
}
/**
* Retrieves a header name and returns its lowercase value.
* @param {string | Buffer} value Header name
* @returns {string}
*/
function headerNameToString (value) {
return headerNameLowerCasedRecord[value] || value.toLowerCase()
}
function parseHeaders (headers, obj = {}) {
// For H2 support
if (!Array.isArray(headers)) return headers
@ -102301,6 +102393,7 @@ module.exports = {
isIterable,
isAsyncIterable,
isDestroyed,
headerNameToString,
parseRawHeaders,
parseHeaders,
parseKeepAliveTimeout,
@ -106437,6 +106530,9 @@ function httpRedirectFetch (fetchParams, response) {
// https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
request.headersList.delete('authorization')
// https://fetch.spec.whatwg.org/#authentication-entries
request.headersList.delete('proxy-authorization', true)
// "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
request.headersList.delete('cookie')
request.headersList.delete('host')
@ -108945,14 +109041,18 @@ const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(8398
const assert = __nccwpck_require__(39491)
const { isUint8Array } = __nccwpck_require__(29830)
let supportedHashes = []
// https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable
/** @type {import('crypto')|undefined} */
let crypto
try {
crypto = __nccwpck_require__(6113)
const possibleRelevantHashes = ['sha256', 'sha384', 'sha512']
supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash))
/* c8 ignore next 3 */
} catch {
}
function responseURL (response) {
@ -109480,66 +109580,56 @@ function bytesMatch (bytes, metadataList) {
return true
}
// 3. If parsedMetadata is the empty set, return true.
// 3. If response is not eligible for integrity validation, return false.
// TODO
// 4. If parsedMetadata is the empty set, return true.
if (parsedMetadata.length === 0) {
return true
}
// 4. Let metadata be the result of getting the strongest
// 5. Let metadata be the result of getting the strongest
// metadata from parsedMetadata.
const list = parsedMetadata.sort((c, d) => d.algo.localeCompare(c.algo))
// get the strongest algorithm
const strongest = list[0].algo
// get all entries that use the strongest algorithm; ignore weaker
const metadata = list.filter((item) => item.algo === strongest)
const strongest = getStrongestMetadata(parsedMetadata)
const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest)
// 5. For each item in metadata:
// 6. For each item in metadata:
for (const item of metadata) {
// 1. Let algorithm be the alg component of item.
const algorithm = item.algo
// 2. Let expectedValue be the val component of item.
let expectedValue = item.hash
const expectedValue = item.hash
// See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
// "be liberal with padding". This is annoying, and it's not even in the spec.
if (expectedValue.endsWith('==')) {
expectedValue = expectedValue.slice(0, -2)
}
// 3. Let actualValue be the result of applying algorithm to bytes.
let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
if (actualValue.endsWith('==')) {
actualValue = actualValue.slice(0, -2)
if (actualValue[actualValue.length - 1] === '=') {
if (actualValue[actualValue.length - 2] === '=') {
actualValue = actualValue.slice(0, -2)
} else {
actualValue = actualValue.slice(0, -1)
}
}
// 4. If actualValue is a case-sensitive match for expectedValue,
// return true.
if (actualValue === expectedValue) {
return true
}
let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
if (actualBase64URL.endsWith('==')) {
actualBase64URL = actualBase64URL.slice(0, -2)
}
if (actualBase64URL === expectedValue) {
if (compareBase64Mixed(actualValue, expectedValue)) {
return true
}
}
// 6. Return false.
// 7. Return false.
return false
}
// https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options
// https://www.w3.org/TR/CSP2/#source-list-syntax
// https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1
const parseHashWithOptions = /((?<algo>sha256|sha384|sha512)-(?<hash>[A-z0-9+/]{1}.*={0,2}))( +[\x21-\x7e]?)?/i
const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i
/**
* @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata
@ -109553,8 +109643,6 @@ function parseMetadata (metadata) {
// 2. Let empty be equal to true.
let empty = true
const supportedHashes = crypto.getHashes()
// 3. For each token returned by splitting metadata on spaces:
for (const token of metadata.split(' ')) {
// 1. Set empty to false.
@ -109564,7 +109652,11 @@ function parseMetadata (metadata) {
const parsedToken = parseHashWithOptions.exec(token)
// 3. If token does not parse, continue to the next token.
if (parsedToken === null || parsedToken.groups === undefined) {
if (
parsedToken === null ||
parsedToken.groups === undefined ||
parsedToken.groups.algo === undefined
) {
// Note: Chromium blocks the request at this point, but Firefox
// gives a warning that an invalid integrity was given. The
// correct behavior is to ignore these, and subsequently not
@ -109573,11 +109665,11 @@ function parseMetadata (metadata) {
}
// 4. Let algorithm be the hash-algo component of token.
const algorithm = parsedToken.groups.algo
const algorithm = parsedToken.groups.algo.toLowerCase()
// 5. If algorithm is a hash function recognized by the user
// agent, add the parsed token to result.
if (supportedHashes.includes(algorithm.toLowerCase())) {
if (supportedHashes.includes(algorithm)) {
result.push(parsedToken.groups)
}
}
@ -109590,6 +109682,82 @@ function parseMetadata (metadata) {
return result
}
/**
* @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList
*/
function getStrongestMetadata (metadataList) {
// Let algorithm be the algo component of the first item in metadataList.
// Can be sha256
let algorithm = metadataList[0].algo
// If the algorithm is sha512, then it is the strongest
// and we can return immediately
if (algorithm[3] === '5') {
return algorithm
}
for (let i = 1; i < metadataList.length; ++i) {
const metadata = metadataList[i]
// If the algorithm is sha512, then it is the strongest
// and we can break the loop immediately
if (metadata.algo[3] === '5') {
algorithm = 'sha512'
break
// If the algorithm is sha384, then a potential sha256 or sha384 is ignored
} else if (algorithm[3] === '3') {
continue
// algorithm is sha256, check if algorithm is sha384 and if so, set it as
// the strongest
} else if (metadata.algo[3] === '3') {
algorithm = 'sha384'
}
}
return algorithm
}
function filterMetadataListByAlgorithm (metadataList, algorithm) {
if (metadataList.length === 1) {
return metadataList
}
let pos = 0
for (let i = 0; i < metadataList.length; ++i) {
if (metadataList[i].algo === algorithm) {
metadataList[pos++] = metadataList[i]
}
}
metadataList.length = pos
return metadataList
}
/**
* Compares two base64 strings, allowing for base64url
* in the second string.
*
* @param {string} actualValue always base64
* @param {string} expectedValue base64 or base64url
* @returns {boolean}
*/
function compareBase64Mixed (actualValue, expectedValue) {
if (actualValue.length !== expectedValue.length) {
return false
}
for (let i = 0; i < actualValue.length; ++i) {
if (actualValue[i] !== expectedValue[i]) {
if (
(actualValue[i] === '+' && expectedValue[i] === '-') ||
(actualValue[i] === '/' && expectedValue[i] === '_')
) {
continue
}
return false
}
}
return true
}
// https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request
function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) {
// TODO
@ -110005,7 +110173,8 @@ module.exports = {
urlHasHttpsScheme,
urlIsHttpHttpsScheme,
readAllBytes,
normalizeMethodRecord
normalizeMethodRecord,
parseMetadata
}
@ -112092,12 +112261,17 @@ function parseLocation (statusCode, headers) {
// https://tools.ietf.org/html/rfc7231#section-6.4.4
function shouldRemoveHeader (header, removeContent, unknownOrigin) {
return (
(header.length === 4 && header.toString().toLowerCase() === 'host') ||
(removeContent && header.toString().toLowerCase().indexOf('content-') === 0) ||
(unknownOrigin && header.length === 13 && header.toString().toLowerCase() === 'authorization') ||
(unknownOrigin && header.length === 6 && header.toString().toLowerCase() === 'cookie')
)
if (header.length === 4) {
return util.headerNameToString(header) === 'host'
}
if (removeContent && util.headerNameToString(header).startsWith('content-')) {
return true
}
if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) {
const name = util.headerNameToString(header)
return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization'
}
return false
}
// https://tools.ietf.org/html/rfc7231#section-6.4
@ -125641,10 +125815,10 @@ function getMultiPathLCA(searchPaths) {
}
return path.join(...commonPaths);
}
function findFilesToUpload(searchPath, globOptions) {
function findFilesToUpload(searchPath, searchOptions) {
return __awaiter(this, void 0, void 0, function* () {
const searchResults = [];
const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions());
const globber = yield glob.create(searchPath, getDefaultGlobOptions());
const rawSearchResults = yield globber.glob();
/*
Files are saved with case insensitivity. Uploading both a.txt and A.txt will cause files to be overwritten
@ -125660,6 +125834,10 @@ function findFilesToUpload(searchPath, globOptions) {
// isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
if (!fileStats.isDirectory()) {
(0, core_1.debug)(`File:${searchResult} was found using the provided searchPath`);
if (!(searchOptions === null || searchOptions === void 0 ? void 0 : searchOptions.includeGitDirectory) && inGitDirectory(searchResult)) {
(0, core_1.debug)(`Ignoring ${searchResult} because it is in the .git directory`);
continue;
}
searchResults.push(searchResult);
// detect any files that would be overwritten because of case insensitivity
if (set.has(searchResult.toLowerCase())) {
@ -125701,6 +125879,16 @@ function findFilesToUpload(searchPath, globOptions) {
});
}
exports.findFilesToUpload = findFilesToUpload;
function inGitDirectory(filePath) {
// Treat a file as inside the .git directory when any of its path segments is exactly '.git'
for (const part of filePath.split(path.sep)) {
if (part === '.git') {
return true;
}
}
return false;
}
/***/ }),
@ -125782,6 +125970,7 @@ var Inputs;
Inputs["RetentionDays"] = "retention-days";
Inputs["CompressionLevel"] = "compression-level";
Inputs["Overwrite"] = "overwrite";
Inputs["IncludeGitDirectory"] = "include-git-directory";
})(Inputs = exports.Inputs || (exports.Inputs = {}));
var NoFileOptions;
(function (NoFileOptions) {
@ -125879,6 +126068,7 @@ function getInputs() {
const name = core.getInput(constants_1.Inputs.Name);
const path = core.getInput(constants_1.Inputs.Path, { required: true });
const overwrite = core.getBooleanInput(constants_1.Inputs.Overwrite);
const includeGitDirectory = core.getBooleanInput(constants_1.Inputs.IncludeGitDirectory);
const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound);
const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound];
if (!noFileBehavior) {
@ -125888,7 +126078,8 @@ function getInputs() {
artifactName: name,
searchPath: path,
ifNoFilesFound: noFileBehavior,
overwrite: overwrite
overwrite: overwrite,
includeGitDirectory: includeGitDirectory
};
const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
if (retentionDaysStr) {
@ -125977,7 +126168,9 @@ function deleteArtifactIfExists(artifactName) {
function run() {
return __awaiter(this, void 0, void 0, function* () {
const inputs = (0, input_helper_1.getInputs)();
const searchResult = yield (0, search_1.findFilesToUpload)(inputs.searchPath);
const searchResult = yield (0, search_1.findFilesToUpload)(inputs.searchPath, {
includeGitDirectory: inputs.includeGitDirectory
});
if (searchResult.filesToUpload.length === 0) {
// No files were found, different use cases warrant different types of behavior if nothing is found
switch (inputs.ifNoFilesFound) {
@ -135988,7 +136181,7 @@ module.exports = index;
/***/ ((module) => {
"use strict";
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.5","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
/***/ }),

View File

@ -207,3 +207,41 @@ jobs:
```
Note that this will download all artifacts to a temporary directory and reupload them as a single artifact. For more information on inputs and other use cases for `actions/upload-artifact/merge@v4`, see [the action documentation](../merge/README.md).
## `.git` Directory
By default, files in the `.git` directory are ignored to avoid unintentionally uploading
credentials.
In versions of this action before `v4.4.0`, files in the `.git` directory were included by default.
If this directory is required, ensure credentials are not saved in `.git/config` and then
enable the `include-git-directory` input.
```yaml
jobs:
upload:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Upload Artifact
uses: actions/upload-artifact@v3
with:
path: .
```
```diff
jobs:
upload:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
+ with:
+ persist-credentials: false
- name: Upload Artifact
- uses: actions/upload-artifact@v3
+ uses: actions/upload-artifact@v4
with:
path: .
+ include-git-directory: true
```

View File

@ -5,6 +5,7 @@ Merge multiple [Actions Artifacts](https://docs.github.com/en/actions/using-work
- [`@actions/upload-artifact/merge`](#actionsupload-artifactmerge)
- [Usage](#usage)
- [Inputs](#inputs)
- [Uploading the `.git` directory](#uploading-the-git-directory)
- [Outputs](#outputs)
- [Examples](#examples)
- [Combining all artifacts in a workflow run](#combining-all-artifacts-in-a-workflow-run)
@ -59,6 +60,44 @@ For most cases, this may not be the most efficient solution. See [the migration
compression-level:
```
#### Uploading the `.git` directory
By default, files in a `.git` directory are ignored in the merged artifact.
This is intended to prevent accidentally uploading Git credentials into an artifact that could then
be extracted.
If files in the `.git` directory are needed, ensure that `actions/checkout` is being used with
`persist-credentials: false`.
```yaml
jobs:
upload:
runs-on: ubuntu-latest
strategy:
matrix:
foo: [a, b, c]
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false # Ensure credentials are not saved in `.git/config`
- name: Upload
uses: actions/upload-artifact@v4
with:
name: my-artifact-${{ matrix.foo }}
path: .
include-git-directory: true
merge:
runs-on: ubuntu-latest
steps:
- uses: actions/upload-artifact/merge@v4
with:
include-git-directory: true
```
### Outputs
| Name | Description | Example |

View File

@ -36,6 +36,9 @@ inputs:
If true, the artifacts that were merged will be deleted.
If false, the artifacts will still exist.
default: 'false'
include-git-directory:
description: 'Include files in the .git directory in the merged artifact.'
default: 'false'
outputs:
artifact-id:

package-lock.json (generated, 58 changed lines)
View File

@ -1,15 +1,15 @@
{
"name": "upload-artifact",
"version": "4.3.2",
"version": "4.3.6",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "upload-artifact",
"version": "4.3.2",
"version": "4.3.6",
"license": "MIT",
"dependencies": {
"@actions/artifact": "^2.1.5",
"@actions/artifact": "2.1.8",
"@actions/core": "^1.10.1",
"@actions/github": "^6.0.0",
"@actions/glob": "^0.3.0",
@ -34,9 +34,9 @@
}
},
"node_modules/@actions/artifact": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.5.tgz",
"integrity": "sha512-V98roImcfgWq7YtL2gzT2p2PTv1oy7k1xloq2RW/EDsJ7fX4oL7x8v9bLmmTPQ5+f4OrUbwveDZNw4iSjbtiWA==",
"version": "2.1.8",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
"integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
"dependencies": {
"@actions/core": "^1.10.0",
"@actions/github": "^5.1.1",
@ -2838,12 +2838,12 @@
}
},
"node_modules/braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"dependencies": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
},
"engines": {
"node": ">=8"
@ -4275,9 +4275,9 @@
}
},
"node_modules/fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"dependencies": {
"to-regex-range": "^5.0.1"
@ -7584,9 +7584,9 @@
}
},
"node_modules/undici": {
"version": "5.28.2",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz",
"integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==",
"version": "5.28.4",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
"integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
"dependencies": {
"@fastify/busboy": "^2.0.0"
},
@ -7902,9 +7902,9 @@
},
"dependencies": {
"@actions/artifact": {
"version": "2.1.5",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.5.tgz",
"integrity": "sha512-V98roImcfgWq7YtL2gzT2p2PTv1oy7k1xloq2RW/EDsJ7fX4oL7x8v9bLmmTPQ5+f4OrUbwveDZNw4iSjbtiWA==",
"version": "2.1.8",
"resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
"integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
"requires": {
"@actions/core": "^1.10.0",
"@actions/github": "^5.1.1",
@ -10102,12 +10102,12 @@
}
},
"braces": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz",
"integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==",
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
"dev": true,
"requires": {
"fill-range": "^7.0.1"
"fill-range": "^7.1.1"
}
},
"browserslist": {
@ -11196,9 +11196,9 @@
}
},
"fill-range": {
"version": "7.0.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz",
"integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==",
"version": "7.1.1",
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
"dev": true,
"requires": {
"to-regex-range": "^5.0.1"
@ -13638,9 +13638,9 @@
}
},
"undici": {
"version": "5.28.2",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.28.2.tgz",
"integrity": "sha512-wh1pHJHnUeQV5Xa8/kyQhO7WFa8M34l026L5P/+2TYiakvGy5Rdc8jWZVyG7ieht/0WgJLEd3kcU5gKx+6GC8w==",
"version": "5.28.4",
"resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz",
"integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==",
"requires": {
"@fastify/busboy": "^2.0.0"
}

View File

@ -1,6 +1,6 @@
{
"name": "upload-artifact",
"version": "4.3.2",
"version": "4.4.0",
"description": "Upload an Actions Artifact in a workflow run",
"main": "dist/upload/index.js",
"scripts": {
@ -29,7 +29,7 @@
},
"homepage": "https://github.com/actions/upload-artifact#readme",
"dependencies": {
"@actions/artifact": "^2.1.5",
"@actions/artifact": "2.1.8",
"@actions/core": "^1.10.1",
"@actions/github": "^6.0.0",
"@actions/glob": "^0.3.0",

View File

@ -5,5 +5,6 @@ export enum Inputs {
SeparateDirectories = 'separate-directories',
RetentionDays = 'retention-days',
CompressionLevel = 'compression-level',
DeleteMerged = 'delete-merged'
DeleteMerged = 'delete-merged',
IncludeGitDirectory = 'include-git-directory'
}

View File

@ -10,6 +10,7 @@ export function getInputs(): MergeInputs {
const pattern = core.getInput(Inputs.Pattern, {required: true})
const separateDirectories = core.getBooleanInput(Inputs.SeparateDirectories)
const deleteMerged = core.getBooleanInput(Inputs.DeleteMerged)
const includeGitDirectory = core.getBooleanInput(Inputs.IncludeGitDirectory)
const inputs = {
name,
@ -17,7 +18,8 @@ export function getInputs(): MergeInputs {
separateDirectories,
deleteMerged,
retentionDays: 0,
compressionLevel: 6
compressionLevel: 6,
includeGitDirectory
} as MergeInputs
const retentionDaysStr = core.getInput(Inputs.RetentionDays)

View File

@ -62,7 +62,9 @@ export async function run(): Promise<void> {
options.compressionLevel = inputs.compressionLevel
}
const searchResult = await findFilesToUpload(tmpDir)
const searchResult = await findFilesToUpload(tmpDir, {
includeGitDirectory: inputs.includeGitDirectory
})
await uploadArtifact(
inputs.name,

View File

@ -30,4 +30,9 @@ export interface MergeInputs {
* If false, the artifacts will be merged into the root of the destination.
*/
separateDirectories: boolean
/**
* Include files in the `.git` directory in the artifact
*/
includeGitDirectory: boolean
}

View File

@ -78,15 +78,21 @@ function getMultiPathLCA(searchPaths: string[]): string {
return path.join(...commonPaths)
}
export interface SearchOptions {
/**
* Indicates whether files in the .git directory should be included in the artifact
*
* @default false
*/
includeGitDirectory: boolean
}
export async function findFilesToUpload(
searchPath: string,
globOptions?: glob.GlobOptions
searchOptions?: SearchOptions
): Promise<SearchResult> {
const searchResults: string[] = []
const globber = await glob.create(
searchPath,
globOptions || getDefaultGlobOptions()
)
const globber = await glob.create(searchPath, getDefaultGlobOptions())
const rawSearchResults: string[] = await globber.glob()
/*
@ -104,6 +110,12 @@ export async function findFilesToUpload(
// isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
if (!fileStats.isDirectory()) {
debug(`File:${searchResult} was found using the provided searchPath`)
if (!searchOptions?.includeGitDirectory && inGitDirectory(searchResult)) {
debug(`Ignoring ${searchResult} because it is in the .git directory`)
continue
}
searchResults.push(searchResult)
// detect any files that would be overwritten because of case insensitivity
@ -155,3 +167,15 @@ export async function findFilesToUpload(
rootDirectory: searchPaths[0]
}
}
function inGitDirectory(filePath: string): boolean {
// Treat a file as inside the .git directory when any of its path segments is exactly '.git'
for (const part of filePath.split(path.sep)) {
if (part === '.git') {
return true
}
}
return false
}
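As an aside on the matching rule above (a standalone sketch; the helper is restated locally and the example paths are hypothetical): only exact `.git` path segments count as being inside a Git directory, so files such as `.gitignore` are still uploaded.

```typescript
import * as path from 'path'
import {strict as assert} from 'assert'

// Mirrors the helper above: a path is inside a .git directory
// when any of its segments is exactly '.git'
function inGitDirectory(filePath: string): boolean {
  return filePath.split(path.sep).some(part => part === '.git')
}

assert.equal(inGitDirectory(path.join('repo', '.git', 'config')), true)
assert.equal(inGitDirectory(path.join('repo', 'src', 'index.ts')), false)
// Dotfiles that merely start with '.git' are not excluded
assert.equal(inGitDirectory(path.join('repo', '.gitignore')), false)
```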

View File

@ -5,7 +5,8 @@ export enum Inputs {
IfNoFilesFound = 'if-no-files-found',
RetentionDays = 'retention-days',
CompressionLevel = 'compression-level',
Overwrite = 'overwrite'
Overwrite = 'overwrite',
IncludeGitDirectory = 'include-git-directory'
}
export enum NoFileOptions {

View File

@ -9,6 +9,7 @@ export function getInputs(): UploadInputs {
const name = core.getInput(Inputs.Name)
const path = core.getInput(Inputs.Path, {required: true})
const overwrite = core.getBooleanInput(Inputs.Overwrite)
const includeGitDirectory = core.getBooleanInput(Inputs.IncludeGitDirectory)
const ifNoFilesFound = core.getInput(Inputs.IfNoFilesFound)
const noFileBehavior: NoFileOptions = NoFileOptions[ifNoFilesFound]
@ -27,7 +28,8 @@ export function getInputs(): UploadInputs {
artifactName: name,
searchPath: path,
ifNoFilesFound: noFileBehavior,
overwrite: overwrite
overwrite: overwrite,
includeGitDirectory: includeGitDirectory
} as UploadInputs
const retentionDaysStr = core.getInput(Inputs.RetentionDays)

View File

@ -24,7 +24,9 @@ async function deleteArtifactIfExists(artifactName: string): Promise<void> {
export async function run(): Promise<void> {
const inputs = getInputs()
const searchResult = await findFilesToUpload(inputs.searchPath)
const searchResult = await findFilesToUpload(inputs.searchPath, {
includeGitDirectory: inputs.includeGitDirectory
})
if (searchResult.filesToUpload.length === 0) {
// No files were found, different use cases warrant different types of behavior if nothing is found
switch (inputs.ifNoFilesFound) {

View File

@ -30,4 +30,9 @@ export interface UploadInputs {
* Whether or not to replace an existing artifact with the same name
*/
overwrite: boolean
/**
* Include files in the `.git` directory in the artifact
*/
includeGitDirectory: boolean
}