Mirror of https://github.com/actions/upload-artifact.git (synced 2025-06-14 15:57:12 +02:00)

Compare commits: v4.3.5 ... joshmgross (6 commits)

f626d210e8
fa37431cef
3412bb46a4
834a144ee9
134dcf33c0
73a0b9c954
.licenses/npm/@actions/artifact.dep.yml (generated, 2 changes)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/artifact"
-version: 2.1.9
+version: 2.1.8
 type: npm
 summary:
 homepage:
README.md (26 changes)

@@ -16,6 +16,7 @@ See also [download-artifact](https://github.com/actions/download-artifact).
 - [Breaking Changes](#breaking-changes)
 - [Usage](#usage)
   - [Inputs](#inputs)
+    - [Uploading the `.git` directory](#uploading-the-git-directory)
   - [Outputs](#outputs)
 - [Examples](#examples)
   - [Upload an Individual File](#upload-an-individual-file)
@@ -64,6 +65,7 @@ There is also a new sub-action, `actions/upload-artifact/merge`. For more info,
 Due to how Artifacts are created in this new version, it is no longer possible to upload to the same named Artifact multiple times. You must either split the uploads into multiple Artifacts with different names, or only upload once. Otherwise you _will_ encounter an error.
 
 3. Limit of Artifacts for an individual job. Each job in a workflow run now has a limit of 500 artifacts.
+4. With `v4.4` and later, the `.git` directory is excluded by default.
 
 For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
 
@@ -109,6 +111,30 @@ For assistance with breaking changes, see [MIGRATION.md](docs/MIGRATION.md).
   overwrite:
 ```
 
+#### Uploading the `.git` directory
+
+By default, files in a `.git` directory are ignored in the uploaded artifact.
+This is intended to prevent accidentally uploading Git credentials into an artifact that could then
+be extracted.
+If files in the `.git` directory are needed, ensure that `actions/checkout` is being used with
+`persist-credentials: false`.
+
+```yaml
+jobs:
+  upload:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false # Ensure credentials are not saved in `.git/config`
+
+      - uses: actions/upload-artifact@v4
+        with:
+          path: .
+          include-git-directory: true
+```
+
 ### Outputs
 
 | Name | Description | Example |
__tests__/search.test.ts

@@ -61,6 +61,12 @@ const lonelyFilePath = path.join(
   'lonely-file.txt'
 )
 
+const gitConfigPath = path.join(root, '.git', 'config')
+const gitHeadPath = path.join(root, '.git', 'HEAD')
+
+const nestedGitConfigPath = path.join(root, 'repository-name', '.git', 'config')
+const nestedGitHeadPath = path.join(root, 'repository-name', '.git', 'HEAD')
+
 describe('Search', () => {
   beforeAll(async () => {
     // mock all output so that there is less noise when running tests
@@ -93,6 +99,11 @@ describe('Search', () => {
       recursive: true
     })
 
+    await fs.mkdir(path.join(root, '.git'))
+    await fs.mkdir(path.join(root, 'repository-name', '.git'), {
+      recursive: true
+    })
+
     await fs.writeFile(searchItem1Path, 'search item1 file')
     await fs.writeFile(searchItem2Path, 'search item2 file')
     await fs.writeFile(searchItem3Path, 'search item3 file')
@@ -110,9 +121,17 @@ describe('Search', () => {
     await fs.writeFile(amazingFileInFolderHPath, 'amazing file')
 
     await fs.writeFile(lonelyFilePath, 'all by itself')
+
+    await fs.writeFile(gitConfigPath, 'git config file')
+    await fs.writeFile(gitHeadPath, 'git head file')
+    await fs.writeFile(nestedGitConfigPath, 'nested git config file')
+    await fs.writeFile(nestedGitHeadPath, 'nested git head file')
     /*
       Directory structure of files that get created:
      root/
+        .git/
+          config
+          HEAD
        folder-a/
          folder-b/
            folder-c/
@@ -136,6 +155,10 @@ describe('Search', () => {
        folder-j/
          folder-k/
            lonely-file.txt
+      repository-name/
+        .git/
+          config
+          HEAD
        search-item5.txt
     */
   })
@@ -352,4 +375,18 @@ describe('Search', () => {
     )
     expect(searchResult.filesToUpload.includes(lonelyFilePath)).toEqual(true)
   })
+
+  it('Excludes .git directory by default', async () => {
+    const searchResult = await findFilesToUpload(root)
+    expect(searchResult.filesToUpload.length).toEqual(13)
+    expect(searchResult.filesToUpload).not.toContain(gitConfigPath)
+  })
+
+  it('Includes .git directory when includeGitDirectory is true', async () => {
+    const searchResult = await findFilesToUpload(root, {
+      includeGitDirectory: true
+    })
+    expect(searchResult.filesToUpload.length).toEqual(17)
+    expect(searchResult.filesToUpload).toContain(gitConfigPath)
+  })
 })
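For orientation (not part of the diff): the two new assertions agree because the fixture tree holds 17 files in total, 4 of which sit under a `.git` directory (the root and nested `config`/`HEAD` pairs), leaving 13 under the default exclusion. A minimal sketch of that arithmetic, assuming a segment-based check like the `inGitDirectory` helper added later in this compare:

```typescript
import * as path from 'path'

// Assumed helper: a path is "in" .git if any segment equals '.git',
// mirroring the check this compare adds to src/shared/search.ts.
function inGitDirectory(filePath: string): boolean {
  return filePath.split(path.sep).includes('.git')
}

// Four of the fixture paths are under a .git directory...
const gitFixtures = [
  path.join('root', '.git', 'config'),
  path.join('root', '.git', 'HEAD'),
  path.join('root', 'repository-name', '.git', 'config'),
  path.join('root', 'repository-name', '.git', 'HEAD')
]
console.log(gitFixtures.every(inGitDirectory)) // true
// ...so 17 total fixture files minus these 4 leaves the expected 13.
```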
action.yml

@@ -40,6 +40,9 @@ inputs:
       If false, the action will fail if an artifact for the given name already exists.
       Does not fail if the artifact does not exist.
     default: 'false'
+  include-git-directory:
+    description: 'Include files in the .git directory in the artifact.'
+    default: 'false'
 
 outputs:
   artifact-id:
dist/merge/index.js (vendored, 85 changes)

@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
@@ -3050,10 +3050,6 @@ function getConcurrency() {
     return concurrency > 300 ? 300 : concurrency;
 }
 exports.getConcurrency = getConcurrency;
-function getUploadChunkTimeout() {
-    return 30000; // 30 seconds
-}
-exports.getUploadChunkTimeout = getUploadChunkTimeout;
 //# sourceMappingURL=config.js.map
 
 /***/ }),
@@ -3302,34 +3298,37 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
         let lastProgressTime = Date.now();
-        const abortController = new AbortController();
-        const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
-            return new Promise((resolve, reject) => {
-                const timer = setInterval(() => {
-                    if (Date.now() - lastProgressTime > interval) {
-                        reject(new Error('Upload progress stalled.'));
-                    }
-                }, interval);
-                abortController.signal.addEventListener('abort', () => {
-                    clearInterval(timer);
-                    resolve();
-                });
-            });
-        });
+        let timeoutId;
+        const chunkTimer = (timeout) => {
+            // clear the previous timeout
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
+            timeoutId = setTimeout(() => {
+                const now = Date.now();
+                // if there's been more than 30 seconds since the
+                // last progress event, then we'll consider the upload stalled
+                if (now - lastProgressTime > timeout) {
+                    throw new Error('Upload progress stalled.');
+                }
+            }, timeout);
+            return timeoutId;
+        };
         const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
+        const timeoutDuration = 300000; // 30 seconds
         core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
+            chunkTimer(timeoutDuration);
             lastProgressTime = Date.now();
         };
         const options = {
             blobHTTPHeaders: { blobContentType: 'zip' },
-            onProgress: uploadCallback,
-            abortSignal: abortController.signal
+            onProgress: uploadCallback
         };
         let sha256Hash = undefined;
         const uploadStream = new stream.PassThrough();
@@ -3338,10 +3337,9 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
         try {
-            yield Promise.race([
-                blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
-                chunkTimer((0, config_1.getUploadChunkTimeout)())
-            ]);
+            // Start the chunk timer
+            timeoutId = chunkTimer(timeoutDuration);
+            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
         }
         catch (error) {
             if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3350,7 +3348,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
             throw error;
         }
         finally {
-            abortController.abort();
+            // clear the timeout whether or not the upload completes
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
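Editorial aside on the hunks above: the 2.1.9 code being removed raced the upload against a rejecting `Promise` and tore the timer down through an `AbortController`, while the restored 2.1.8 code re-arms a single `setTimeout` from every progress callback (note the restored constant is `300000` ms, five minutes, despite its `// 30 seconds` comment). A minimal standalone sketch of that re-armed watchdog pattern, with illustrative names rather than the vendored ones:

```typescript
// Sketch of the re-armed watchdog used by the restored 2.1.8 path.
// Only the shape matches the vendored code; names are illustrative.
let lastProgressTime = Date.now()
let timeoutId: NodeJS.Timeout | undefined

function armWatchdog(timeoutMs: number): NodeJS.Timeout {
  if (timeoutId) {
    clearTimeout(timeoutId) // at most one pending timer at a time
  }
  timeoutId = setTimeout(() => {
    // fires only if no progress event re-armed the timer within the window
    if (Date.now() - lastProgressTime > timeoutMs) {
      throw new Error('Upload progress stalled.')
    }
  }, timeoutMs)
  return timeoutId
}

function onUploadProgress(loadedBytes: number, timeoutMs: number): void {
  console.log(`Uploaded bytes ${loadedBytes}`)
  armWatchdog(timeoutMs) // every progress event pushes the deadline out
  lastProgressTime = Date.now()
}
```

One trade-off visible in the diff: a `throw` from inside a `setTimeout` callback cannot be caught by the surrounding `try`/`catch`, which is what the removed `Promise.race` formulation avoided.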
@@ -3777,6 +3778,7 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(43084));
 const core = __importStar(__nccwpck_require__(42186));
+const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(74610);
 exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
@@ -3808,7 +3810,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
     for (const file of uploadSpecification) {
         if (file.sourcePath !== null) {
             // Add a normal file to the zip
-            zip.file(file.sourcePath, {
+            zip.append((0, fs_1.createReadStream)(file.sourcePath), {
                 name: file.destinationPath
             });
         }
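The second hunk above reverts the archiver usage from the lazy `zip.file(path, opts)` entry style to eagerly opening one read stream per entry. Both calls are real archiver APIs; a minimal sketch contrasting them, with illustrative file names:

```typescript
import * as archiver from 'archiver'
import {createReadStream} from 'fs'

const zip = archiver.create('zip', {zlib: {level: 6}})

// Lazy entry: archiver opens the file itself when the entry is streamed out.
zip.file('data/report.txt', {name: 'report.txt'})

// Eager entry (the restored 2.1.8 behavior): the caller opens a stream up front.
zip.append(createReadStream('data/report.txt'), {name: 'report.txt'})

void zip.finalize()
```

The eager form can hold many file descriptors open at once on large uploads, which appears to be why the lazy form was introduced upstream in the first place.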
@@ -125725,6 +125727,7 @@ var Inputs;
     Inputs["RetentionDays"] = "retention-days";
     Inputs["CompressionLevel"] = "compression-level";
     Inputs["DeleteMerged"] = "delete-merged";
+    Inputs["IncludeGitDirectory"] = "include-git-directory";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 
 
@@ -125808,13 +125811,15 @@ function getInputs() {
     const pattern = core.getInput(constants_1.Inputs.Pattern, { required: true });
     const separateDirectories = core.getBooleanInput(constants_1.Inputs.SeparateDirectories);
     const deleteMerged = core.getBooleanInput(constants_1.Inputs.DeleteMerged);
+    const includeGitDirectory = core.getBooleanInput(constants_1.Inputs.IncludeGitDirectory);
     const inputs = {
         name,
         pattern,
         separateDirectories,
         deleteMerged,
         retentionDays: 0,
-        compressionLevel: 6
+        compressionLevel: 6,
+        includeGitDirectory
     };
     const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
     if (retentionDaysStr) {
@@ -125930,7 +125935,9 @@ function run() {
         if (typeof inputs.compressionLevel !== 'undefined') {
             options.compressionLevel = inputs.compressionLevel;
         }
-        const searchResult = yield (0, search_1.findFilesToUpload)(tmpDir);
+        const searchResult = yield (0, search_1.findFilesToUpload)(tmpDir, {
+            includeGitDirectory: inputs.includeGitDirectory
+        });
         yield (0, upload_artifact_1.uploadArtifact)(inputs.name, searchResult.filesToUpload, searchResult.rootDirectory, options);
         core.info(`The ${artifacts.length} artifact(s) have been successfully merged!`);
         if (inputs.deleteMerged) {
@@ -126055,10 +126062,10 @@ function getMultiPathLCA(searchPaths) {
     }
     return path.join(...commonPaths);
 }
-function findFilesToUpload(searchPath, globOptions) {
+function findFilesToUpload(searchPath, searchOptions) {
     return __awaiter(this, void 0, void 0, function* () {
         const searchResults = [];
-        const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions());
+        const globber = yield glob.create(searchPath, getDefaultGlobOptions());
         const rawSearchResults = yield globber.glob();
         /*
           Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten
@@ -126074,6 +126081,10 @@ function findFilesToUpload(searchPath, globOptions) {
         // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
         if (!fileStats.isDirectory()) {
             (0, core_1.debug)(`File:${searchResult} was found using the provided searchPath`);
+            if (!(searchOptions === null || searchOptions === void 0 ? void 0 : searchOptions.includeGitDirectory) && inGitDirectory(searchResult)) {
+                (0, core_1.debug)(`Ignoring ${searchResult} because it is in the .git directory`);
+                continue;
+            }
             searchResults.push(searchResult);
             // detect any files that would be overwritten because of case insensitivity
             if (set.has(searchResult.toLowerCase())) {
@@ -126115,6 +126126,16 @@ function findFilesToUpload(searchPath, globOptions) {
     });
 }
 exports.findFilesToUpload = findFilesToUpload;
+function inGitDirectory(filePath) {
+    // The .git directory is a directory, so we need to check if the file path is a directory
+    // and if it is a .git directory
+    for (const part of filePath.split(path.sep)) {
+        if (part === '.git') {
+            return true;
+        }
+    }
+    return false;
+}
 
 
 /***/ }),
@@ -136150,7 +136171,7 @@ module.exports = index;
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
dist/upload/index.js (vendored, 85 changes)

@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
@@ -3050,10 +3050,6 @@ function getConcurrency() {
     return concurrency > 300 ? 300 : concurrency;
 }
 exports.getConcurrency = getConcurrency;
-function getUploadChunkTimeout() {
-    return 30000; // 30 seconds
-}
-exports.getUploadChunkTimeout = getUploadChunkTimeout;
 //# sourceMappingURL=config.js.map
 
 /***/ }),
@@ -3302,34 +3298,37 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
         let lastProgressTime = Date.now();
-        const abortController = new AbortController();
-        const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
-            return new Promise((resolve, reject) => {
-                const timer = setInterval(() => {
-                    if (Date.now() - lastProgressTime > interval) {
-                        reject(new Error('Upload progress stalled.'));
-                    }
-                }, interval);
-                abortController.signal.addEventListener('abort', () => {
-                    clearInterval(timer);
-                    resolve();
-                });
-            });
-        });
+        let timeoutId;
+        const chunkTimer = (timeout) => {
+            // clear the previous timeout
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
+            timeoutId = setTimeout(() => {
+                const now = Date.now();
+                // if there's been more than 30 seconds since the
+                // last progress event, then we'll consider the upload stalled
+                if (now - lastProgressTime > timeout) {
+                    throw new Error('Upload progress stalled.');
+                }
+            }, timeout);
+            return timeoutId;
+        };
         const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
+        const timeoutDuration = 300000; // 30 seconds
         core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
+            chunkTimer(timeoutDuration);
             lastProgressTime = Date.now();
         };
         const options = {
             blobHTTPHeaders: { blobContentType: 'zip' },
-            onProgress: uploadCallback,
-            abortSignal: abortController.signal
+            onProgress: uploadCallback
         };
         let sha256Hash = undefined;
         const uploadStream = new stream.PassThrough();
@@ -3338,10 +3337,9 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
         try {
-            yield Promise.race([
-                blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
-                chunkTimer((0, config_1.getUploadChunkTimeout)())
-            ]);
+            // Start the chunk timer
+            timeoutId = chunkTimer(timeoutDuration);
+            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
         }
         catch (error) {
             if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3350,7 +3348,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
             throw error;
         }
         finally {
-            abortController.abort();
+            // clear the timeout whether or not the upload completes
+            if (timeoutId) {
+                clearTimeout(timeoutId);
+            }
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
@@ -3777,6 +3778,7 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(43084));
 const core = __importStar(__nccwpck_require__(42186));
+const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(74610);
 exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
@@ -3808,7 +3810,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
     for (const file of uploadSpecification) {
         if (file.sourcePath !== null) {
             // Add a normal file to the zip
-            zip.file(file.sourcePath, {
+            zip.append((0, fs_1.createReadStream)(file.sourcePath), {
                 name: file.destinationPath
             });
         }
@@ -125813,10 +125815,10 @@ function getMultiPathLCA(searchPaths) {
     }
     return path.join(...commonPaths);
 }
-function findFilesToUpload(searchPath, globOptions) {
+function findFilesToUpload(searchPath, searchOptions) {
     return __awaiter(this, void 0, void 0, function* () {
         const searchResults = [];
-        const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions());
+        const globber = yield glob.create(searchPath, getDefaultGlobOptions());
         const rawSearchResults = yield globber.glob();
         /*
           Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten
@@ -125832,6 +125834,10 @@ function findFilesToUpload(searchPath, globOptions) {
         // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
         if (!fileStats.isDirectory()) {
             (0, core_1.debug)(`File:${searchResult} was found using the provided searchPath`);
+            if (!(searchOptions === null || searchOptions === void 0 ? void 0 : searchOptions.includeGitDirectory) && inGitDirectory(searchResult)) {
+                (0, core_1.debug)(`Ignoring ${searchResult} because it is in the .git directory`);
+                continue;
+            }
             searchResults.push(searchResult);
             // detect any files that would be overwritten because of case insensitivity
             if (set.has(searchResult.toLowerCase())) {
@@ -125873,6 +125879,16 @@ function findFilesToUpload(searchPath, globOptions) {
     });
 }
 exports.findFilesToUpload = findFilesToUpload;
+function inGitDirectory(filePath) {
+    // The .git directory is a directory, so we need to check if the file path is a directory
+    // and if it is a .git directory
+    for (const part of filePath.split(path.sep)) {
+        if (part === '.git') {
+            return true;
+        }
+    }
+    return false;
+}
 
 
 /***/ }),
@@ -125954,6 +125970,7 @@ var Inputs;
     Inputs["RetentionDays"] = "retention-days";
     Inputs["CompressionLevel"] = "compression-level";
     Inputs["Overwrite"] = "overwrite";
+    Inputs["IncludeGitDirectory"] = "include-git-directory";
 })(Inputs = exports.Inputs || (exports.Inputs = {}));
 var NoFileOptions;
 (function (NoFileOptions) {
@@ -126051,6 +126068,7 @@ function getInputs() {
     const name = core.getInput(constants_1.Inputs.Name);
     const path = core.getInput(constants_1.Inputs.Path, { required: true });
     const overwrite = core.getBooleanInput(constants_1.Inputs.Overwrite);
+    const includeGitDirectory = core.getBooleanInput(constants_1.Inputs.IncludeGitDirectory);
     const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound);
     const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound];
     if (!noFileBehavior) {
@@ -126060,7 +126078,8 @@
         artifactName: name,
         searchPath: path,
         ifNoFilesFound: noFileBehavior,
-        overwrite: overwrite
+        overwrite: overwrite,
+        includeGitDirectory: includeGitDirectory
     };
     const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays);
     if (retentionDaysStr) {
@@ -126149,7 +126168,9 @@ function deleteArtifactIfExists(artifactName) {
 function run() {
     return __awaiter(this, void 0, void 0, function* () {
         const inputs = (0, input_helper_1.getInputs)();
-        const searchResult = yield (0, search_1.findFilesToUpload)(inputs.searchPath);
+        const searchResult = yield (0, search_1.findFilesToUpload)(inputs.searchPath, {
+            includeGitDirectory: inputs.includeGitDirectory
+        });
         if (searchResult.filesToUpload.length === 0) {
             // No files were found, different use cases warrant different types of behavior if nothing is found
             switch (inputs.ifNoFilesFound) {
@@ -136160,7 +136181,7 @@ module.exports = index;
 /***/ ((module) => {
 
 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
 
 /***/ }),
docs/MIGRATION.md

@@ -207,3 +207,41 @@ jobs:
 ```
 
 Note that this will download all artifacts to a temporary directory and reupload them as a single artifact. For more information on inputs and other use cases for `actions/upload-artifact/merge@v4`, see [the action documentation](../merge/README.md).
+
+## `.git` Directory
+
+By default, files in the `.git` directory are ignored to avoid unintentionally uploading
+credentials.
+
+In versions of this action before `v4.4.0`, files in the `.git` directory were included by default.
+If this directory is required, ensure credentials are not saved in `.git/config` and then
+enable the `include-git-directory` input.
+
+```yaml
+jobs:
+  upload:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Upload Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          path: .
+```
+
+
+```diff
+jobs:
+  upload:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
++       with:
++         persist-credentials: false
+      - name: Upload Artifact
+-       uses: actions/upload-artifact@v3
++       uses: actions/upload-artifact@v4
+        with:
+          path: .
++         include-git-directory: true
+```
merge/README.md

@@ -5,6 +5,7 @@ Merge multiple [Actions Artifacts](https://docs.github.com/en/actions/using-work
 - [`@actions/upload-artifact/merge`](#actionsupload-artifactmerge)
   - [Usage](#usage)
   - [Inputs](#inputs)
+    - [Uploading the `.git` directory](#uploading-the-git-directory)
   - [Outputs](#outputs)
   - [Examples](#examples)
     - [Combining all artifacts in a workflow run](#combining-all-artifacts-in-a-workflow-run)
@@ -59,6 +60,44 @@ For most cases, this may not be the most efficient solution. See [the migration
   compression-level:
 ```
 
+#### Uploading the `.git` directory
+
+By default, files in a `.git` directory are ignored in the merged artifact.
+This is intended to prevent accidentally uploading Git credentials into an artifact that could then
+be extracted.
+If files in the `.git` directory are needed, ensure that `actions/checkout` is being used with
+`persist-credentials: false`.
+
+```yaml
+jobs:
+  upload:
+    runs-on: ubuntu-latest
+
+    strategy:
+      matrix:
+        foo: [a, b, c]
+
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          persist-credentials: false # Ensure credentials are not saved in `.git/config`
+
+      - name: Upload
+        uses: actions/upload-artifact@v4
+        with:
+          name: my-artifact-${{ matrix.foo }}
+          path: .
+          include-git-directory: true
+
+  merge:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/upload-artifact/merge@v4
+        with:
+          include-git-directory: true
+```
+
 ### Outputs
 
 | Name | Description | Example |
merge/action.yml

@@ -36,6 +36,9 @@ inputs:
       If true, the artifacts that were merged will be deleted.
      If false, the artifacts will still exist.
     default: 'false'
+  include-git-directory:
+    description: 'Include files in the .git directory in the merged artifact.'
+    default: 'false'
 
 outputs:
   artifact-id:
package-lock.json (generated, 18 changes)

@@ -1,15 +1,15 @@
 {
   "name": "upload-artifact",
-  "version": "4.3.5",
+  "version": "4.3.6",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "upload-artifact",
-      "version": "4.3.5",
+      "version": "4.3.6",
       "license": "MIT",
       "dependencies": {
-        "@actions/artifact": "^2.1.9",
+        "@actions/artifact": "2.1.8",
         "@actions/core": "^1.10.1",
         "@actions/github": "^6.0.0",
         "@actions/glob": "^0.3.0",
@@ -34,9 +34,9 @@
       }
     },
     "node_modules/@actions/artifact": {
-      "version": "2.1.9",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.9.tgz",
-      "integrity": "sha512-f9JXC9JrwramDRJHZiIOKJo3PGw/V3riYegLj5kHi8YEJ2k72TNUd1zDW1BG50ILnzJ0cp1faDVJ2pSdolRQfg==",
+      "version": "2.1.8",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
+      "integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/github": "^5.1.1",
@@ -7902,9 +7902,9 @@
   },
   "dependencies": {
     "@actions/artifact": {
-      "version": "2.1.9",
-      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.9.tgz",
-      "integrity": "sha512-f9JXC9JrwramDRJHZiIOKJo3PGw/V3riYegLj5kHi8YEJ2k72TNUd1zDW1BG50ILnzJ0cp1faDVJ2pSdolRQfg==",
+      "version": "2.1.8",
+      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-2.1.8.tgz",
+      "integrity": "sha512-kxgbllgF5f6mEdMeSW6WXlUbV1U77V9ECpA7LOYaY+Tm6RfXOm36EdXbpm+T9VPeaVqXK4QHLAgqay9GSyClgw==",
       "requires": {
         "@actions/core": "^1.10.0",
         "@actions/github": "^5.1.1",
package.json

@@ -1,6 +1,6 @@
 {
   "name": "upload-artifact",
-  "version": "4.3.5",
+  "version": "4.4.0",
   "description": "Upload an Actions Artifact in a workflow run",
   "main": "dist/upload/index.js",
   "scripts": {
@@ -29,7 +29,7 @@
   },
   "homepage": "https://github.com/actions/upload-artifact#readme",
   "dependencies": {
-    "@actions/artifact": "^2.1.9",
+    "@actions/artifact": "2.1.8",
     "@actions/core": "^1.10.1",
     "@actions/github": "^6.0.0",
     "@actions/glob": "^0.3.0",
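The dependency change above is not just a version bump: `^2.1.9` is a caret range, while `2.1.8` is an exact pin, so the downgrade also stops npm from silently re-resolving forward. A small sketch with the `semver` package (assumed available as a dev dependency):

```typescript
import * as semver from 'semver'

// Caret range: any compatible 2.x release at or above 2.1.9 satisfies it.
console.log(semver.satisfies('2.1.9', '^2.1.9')) // true
console.log(semver.satisfies('2.2.0', '^2.1.9')) // true

// Exact pin: only 2.1.8 itself resolves.
console.log(semver.satisfies('2.1.8', '2.1.8')) // true
console.log(semver.satisfies('2.1.9', '2.1.8')) // false
```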
src/merge/constants.ts

@@ -5,5 +5,6 @@ export enum Inputs {
   SeparateDirectories = 'separate-directories',
   RetentionDays = 'retention-days',
   CompressionLevel = 'compression-level',
-  DeleteMerged = 'delete-merged'
+  DeleteMerged = 'delete-merged',
+  IncludeGitDirectory = 'include-git-directory'
 }
src/merge/input-helper.ts

@@ -10,6 +10,7 @@ export function getInputs(): MergeInputs {
   const pattern = core.getInput(Inputs.Pattern, {required: true})
   const separateDirectories = core.getBooleanInput(Inputs.SeparateDirectories)
   const deleteMerged = core.getBooleanInput(Inputs.DeleteMerged)
+  const includeGitDirectory = core.getBooleanInput(Inputs.IncludeGitDirectory)
 
   const inputs = {
     name,
@@ -17,7 +18,8 @@ export function getInputs(): MergeInputs {
     separateDirectories,
     deleteMerged,
     retentionDays: 0,
-    compressionLevel: 6
+    compressionLevel: 6,
+    includeGitDirectory
   } as MergeInputs
 
   const retentionDaysStr = core.getInput(Inputs.RetentionDays)
src/merge/merge-artifacts.ts

@@ -62,7 +62,9 @@ export async function run(): Promise<void> {
     options.compressionLevel = inputs.compressionLevel
   }
 
-  const searchResult = await findFilesToUpload(tmpDir)
+  const searchResult = await findFilesToUpload(tmpDir, {
+    includeGitDirectory: inputs.includeGitDirectory
+  })
 
   await uploadArtifact(
     inputs.name,
src/merge/merge-inputs.ts

@@ -30,4 +30,9 @@ export interface MergeInputs {
    * If false, the artifacts will be merged into the root of the destination.
    */
   separateDirectories: boolean
+
+  /**
+   * Include files in the `.git` directory in the artifact
+   */
+  includeGitDirectory: boolean
 }
src/shared/search.ts

@@ -78,15 +78,21 @@ function getMultiPathLCA(searchPaths: string[]): string {
   return path.join(...commonPaths)
 }
 
+export interface SearchOptions {
+  /**
+   * Indicates whether files in the .git directory should be included in the artifact
+   *
+   * @default false
+   */
+  includeGitDirectory: boolean
+}
+
 export async function findFilesToUpload(
   searchPath: string,
-  globOptions?: glob.GlobOptions
+  searchOptions?: SearchOptions
 ): Promise<SearchResult> {
   const searchResults: string[] = []
-  const globber = await glob.create(
-    searchPath,
-    globOptions || getDefaultGlobOptions()
-  )
+  const globber = await glob.create(searchPath, getDefaultGlobOptions())
   const rawSearchResults: string[] = await globber.glob()
 
   /*
@@ -104,6 +110,12 @@ export async function findFilesToUpload(
     // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead
     if (!fileStats.isDirectory()) {
       debug(`File:${searchResult} was found using the provided searchPath`)
+
+      if (!searchOptions?.includeGitDirectory && inGitDirectory(searchResult)) {
+        debug(`Ignoring ${searchResult} because it is in the .git directory`)
+        continue
+      }
+
       searchResults.push(searchResult)
 
       // detect any files that would be overwritten because of case insensitivity
@@ -155,3 +167,15 @@ export async function findFilesToUpload(
     rootDirectory: searchPaths[0]
   }
 }
+
+function inGitDirectory(filePath: string): boolean {
+  // The .git directory is a directory, so we need to check if the file path is a directory
+  // and if it is a .git directory
+  for (const part of filePath.split(path.sep)) {
+    if (part === '.git') {
+      return true
+    }
+  }
+
+  return false
+}
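A short usage sketch of the reworked signature above; the search path and import path are illustrative, while `findFilesToUpload` and `SearchOptions` are exactly the exports shown in this hunk:

```typescript
import {findFilesToUpload} from './search'

async function demo(): Promise<void> {
  // Default: anything under a .git directory is filtered out of the results.
  const withoutGit = await findFilesToUpload('dist/**')

  // Opt back in through the new options parameter.
  const withGit = await findFilesToUpload('dist/**', {includeGitDirectory: true})

  console.log(withoutGit.filesToUpload.length, withGit.filesToUpload.length)
}

void demo()
```

Note that `inGitDirectory` matches any path segment named `.git`, so a regular file named `.git` would be skipped as well, and splitting on `path.sep` keeps the check correct for Windows paths.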
src/upload/constants.ts

@@ -5,7 +5,8 @@ export enum Inputs {
   IfNoFilesFound = 'if-no-files-found',
   RetentionDays = 'retention-days',
   CompressionLevel = 'compression-level',
-  Overwrite = 'overwrite'
+  Overwrite = 'overwrite',
+  IncludeGitDirectory = 'include-git-directory'
 }
 
 export enum NoFileOptions {
src/upload/input-helper.ts

@@ -9,6 +9,7 @@ export function getInputs(): UploadInputs {
   const name = core.getInput(Inputs.Name)
   const path = core.getInput(Inputs.Path, {required: true})
   const overwrite = core.getBooleanInput(Inputs.Overwrite)
+  const includeGitDirectory = core.getBooleanInput(Inputs.IncludeGitDirectory)
 
   const ifNoFilesFound = core.getInput(Inputs.IfNoFilesFound)
   const noFileBehavior: NoFileOptions = NoFileOptions[ifNoFilesFound]
@@ -27,7 +28,8 @@ export function getInputs(): UploadInputs {
     artifactName: name,
     searchPath: path,
     ifNoFilesFound: noFileBehavior,
-    overwrite: overwrite
+    overwrite: overwrite,
+    includeGitDirectory: includeGitDirectory
   } as UploadInputs
 
   const retentionDaysStr = core.getInput(Inputs.RetentionDays)
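`core.getBooleanInput` (used above for the new input) parses the YAML 1.2 core-schema booleans and throws on anything else, which is why the action.yml defaults are the strings `'true'`/`'false'`. A hand-rolled equivalent for illustration only, not the toolkit's source:

```typescript
// Assumed semantics of @actions/core's getBooleanInput:
// accept the YAML 1.2 core-schema spellings, reject everything else.
function parseBooleanInput(raw: string, name: string): boolean {
  const trueValues = ['true', 'True', 'TRUE']
  const falseValues = ['false', 'False', 'FALSE']
  if (trueValues.includes(raw)) return true
  if (falseValues.includes(raw)) return false
  throw new TypeError(`Input is not a YAML 1.2 core-schema boolean: ${name}`)
}

console.log(parseBooleanInput('false', 'include-git-directory')) // false
```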
src/upload/upload-artifact.ts

@@ -24,7 +24,9 @@ async function deleteArtifactIfExists(artifactName: string): Promise<void> {
 
 export async function run(): Promise<void> {
   const inputs = getInputs()
-  const searchResult = await findFilesToUpload(inputs.searchPath)
+  const searchResult = await findFilesToUpload(inputs.searchPath, {
+    includeGitDirectory: inputs.includeGitDirectory
+  })
   if (searchResult.filesToUpload.length === 0) {
     // No files were found, different use cases warrant different types of behavior if nothing is found
     switch (inputs.ifNoFilesFound) {
src/upload/upload-inputs.ts

@@ -30,4 +30,9 @@ export interface UploadInputs {
    * Whether or not to replace an existing artifact with the same name
    */
   overwrite: boolean
+
+  /**
+   * Include files in the `.git` directory in the artifact
+   */
+  includeGitDirectory: boolean
 }