Mirror of https://github.com/actions/upload-artifact.git (synced 2025-06-15 16:27:11 +02:00)

Compare commits: master...v2-preview (12 commits)
| Author | SHA1 | Date |
| --- | --- | --- |
| | 0aad9dab0a | |
| | cb22e2637a | |
| | 827c1aa5e0 | |
| | 342dfd841f | |
| | 49f93b55a2 | |
| | f72ed18289 | |
| | f42ee54997 | |
| | 85bdb8f4a9 | |
| | 40fe78158e | |
| | 826ad0a00b | |
| | f0ad69f4df | |
| | c9be818b8a | |

33 .github/ISSUE_TEMPLATE/bug_report.md (vendored)

@@ -1,33 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**Version**
- [ ] V1
- [ ] V2

**Environment**
- [ ] self-hosted
- [ ] Linux
- [ ] Windows
- [ ] Mac

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Run/Repo Url**
If applicable, and if your repo/run is public, please include a URL so it is easier for us to investigate.

**How to reproduce**
If applicable, add information on how to reproduce the problem.

**Additional context**
Add any other context about the problem here.

44 .github/workflows/codeql-analysis.yml (vendored)

@@ -1,44 +0,0 @@
name: "Code scanning - action"

on:
  push:
    paths-ignore:
      - '**.md'
  schedule:
    - cron: '0 6 * * 3'

jobs:
  CodeQL-Build:

    # CodeQL runs on ubuntu-latest and windows-latest
    runs-on: ubuntu-latest

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      # Override language selection by uncommenting this and choosing your languages
      # with:
      #   languages: go, javascript, csharp, python, cpp, java

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    #    and modify them (or add more) to build your code if your project
    #    uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1

30 .github/workflows/test.yml (vendored)

@@ -75,16 +75,6 @@ jobs:
          name: 'GZip-Artifact'
          path: path/to/dir-3/

      # Upload a directory that contains a file that will be uploaded with GZip
      - name: 'Upload artifact #4'
        uses: ./
        with:
          name: 'Multi-Path-Artifact'
          path: |
            path/to/dir-1/*
            path/to/dir-[23]/*
            !path/to/dir-3/*.txt

      # Verify artifacts. Switch to download-artifact@v2 once it's out of preview

      # Download Artifact #1 and verify the correctness of the content

@@ -148,23 +138,3 @@ jobs:
            Write-Error "File contents of downloaded artifact is incorrect"
          }
        shell: pwsh

      - name: 'Download artifact #4'
        uses: actions/download-artifact@v1
        with:
          name: 'Multi-Path-Artifact'
          path: multi/artifact

      - name: 'Verify Artifact #4'
        run: |
          $file1 = "multi/artifact/dir-1/file1.txt"
          $file2 = "multi/artifact/dir-2/file2.txt"
          if(!(Test-Path -path $file1) -or !(Test-Path -path $file2))
          {
              Write-Error "Expected files do not exist"
          }
          if(!((Get-Content $file1) -ceq "Lorem ipsum dolor sit amet") -or !((Get-Content $file2) -ceq "Hello world from file #2"))
          {
              Write-Error "File contents of downloaded artifacts are incorrect"
          }
        shell: pwsh

CODE_OF_CONDUCT.md

@@ -1,76 +0,0 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to make participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies within all project spaces, and it also applies when
an individual is representing the project or its community in public spaces.
Examples of representing a project or community include using an official
project e-mail address, posting via an official social media account, or acting
as an appointed representative at an online or offline event. Representation of
a project may be further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at opensource@github.com. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq

CONTRIBUTING.md

@@ -1,52 +0,0 @@
## Contributing

[fork]: https://github.com/actions/upload-artifact/fork
[pr]: https://github.com/actions/upload-artifact/compare
[style]: https://github.com/styleguide/js
[code-of-conduct]: CODE_OF_CONDUCT.md

Hi there! We're thrilled that you'd like to contribute to this project. Your help is essential for keeping it great.

Contributions to this project are [released](https://help.github.com/articles/github-terms-of-service/#6-contributions-under-repository-license) to the public under the [project's open source license](LICENSE).

Please note that this project is released with a [Contributor Code of Conduct][code-of-conduct]. By participating in this project you agree to abide by its terms.

## Found a bug?

- **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/actions/upload-artifact/issues).
- If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/actions/upload-artifact/issues/new). Be sure to include a **title and clear description**, as much relevant information as possible, and a **code sample** or a **reproducible test case** demonstrating the expected behavior that is not occurring.
- If possible, use the relevant bug report templates to create the issue.

## What should I know before submitting a pull request or issue?

The code related to `upload-artifact` is split between this repository and [actions/toolkit](https://github.com/actions/toolkit), where the `@actions/artifact` npm package is housed. The npm package contains the core functionality for interacting with artifacts. Any extra functionality on top of interacting with the APIs, such as search, lives in this repository.

Artifact-related issues will be tracked in this repository, so please do not open duplicate issues in `actions/toolkit`.

## Submitting a pull request

1. [Fork][fork] and clone the repository
2. Configure and install the dependencies: `npm install`
3. Make sure the tests pass on your machine: `npm run test`
4. Create a new branch: `git checkout -b my-branch-name`
5. Make your change, add tests, and make sure the tests still pass
6. Make sure your code is correctly formatted: `npm run format`
7. Make sure your code passes linting: `npm run lint`
8. Update `dist/index.js` using `npm run release`. This creates a single JavaScript file that is used as an entry point for the action
9. Push to your fork and [submit a pull request][pr]
10. Pat yourself on the back and wait for your pull request to be reviewed and merged.

Here are a few things you can do that will increase the likelihood of your pull request being accepted:

- Write tests.
- Keep your change as focused as possible. If there are multiple changes you would like to make that are not dependent upon each other, consider submitting them as separate pull requests.
- Write a [good commit message](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).

## Resources

- [How to Contribute to Open Source](https://opensource.guide/how-to-contribute/)
- [Using Pull Requests](https://help.github.com/articles/about-pull-requests/)
- [GitHub Help](https://help.github.com)

Thanks! :heart: :heart: :heart:

GitHub Actions Team :octocat:

58 README.md

@@ -10,9 +10,6 @@ See also [download-artifact](https://github.com/actions/download-artifact).

- Specify a wildcard pattern
- Specify an individual file
- Specify a directory (previously you were limited to only this option)
- Multi path upload
  - Use a combination of individual files, wildcards or directories
  - Support for excluding certain files
- Upload an artifact without providing a name
- Fix for artifact uploads sometimes not working with containers
- Proxy support out of the box
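
As a minimal illustrative sketch (this step is not part of the original README), the `name` input can be omitted entirely; the artifact is then saved under the default name `artifact`, as noted further down:

```yaml
- uses: actions/upload-artifact@v2
  with:
    # No name given: the artifact is saved under the default name "artifact"
    path: path/to/artifact/
```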

@@ -48,7 +45,7 @@ steps:
    path: path/to/artifact/ # or path/to/artifact
```

### Upload using a Wildcard Pattern
### Upload using a Wildcard Pattern:
```yaml
- uses: actions/upload-artifact@v2
  with:
@@ -56,37 +53,11 @@ steps:
    path: path/**/[abc]rtifac?/*
```

### Upload using Multiple Paths and Exclusions
```yaml
- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    path: |
      path/output/bin/
      path/output/test-results
      !path/**/*.tmp
```

For supported wildcards along with behavior and documentation, see [@actions/glob](https://github.com/actions/toolkit/tree/master/packages/glob), which is used internally to search for files.

If a wildcard pattern is used, the path hierarchy will be preserved after the first wildcard pattern.

```
path/to/*/directory/foo?.txt =>
    ∟ path/to/some/directory/foo1.txt
    ∟ path/to/some/directory/foo2.txt
    ∟ path/to/other/directory/foo1.txt

would be flattened and uploaded as =>
    ∟ some/directory/foo1.txt
    ∟ some/directory/foo2.txt
    ∟ other/directory/foo1.txt
```

If multiple paths are provided as input, the least common ancestor of all the search paths will be used as the root directory of the artifact. Exclude paths do not affect the directory structure.
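
To make the least-common-ancestor rule concrete, here is a minimal sketch (the `path/output/...` paths are reused from the example above; the resulting layout follows from the rule rather than from the original README text):

```yaml
- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    path: |
      path/output/bin/
      path/output/test-results
# The least common ancestor of the two search paths is path/output, so it
# becomes the artifact root: files land under bin/... and test-results/...
```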

Relative and absolute file paths are both allowed. Relative paths are rooted against the current working directory. Paths that begin with a wildcard character should be quoted to avoid being interpreted as YAML aliases.
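
For instance, a pattern with a leading wildcard must be quoted, since an unquoted leading `*` is YAML alias syntax (a minimal sketch; the pattern itself is illustrative):

```yaml
- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    # Quoted: an unquoted leading * would be parsed as a YAML alias
    path: '**/*.log'
```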

The [@actions/artifact](https://github.com/actions/toolkit/tree/master/packages/artifact) package is used internally to handle most of the logic around uploading an artifact. There is extra documentation around upload limitations and behavior in the toolkit repo that is worth checking out.
The [@actions/artifact](https://github.com/actions/toolkit/tree/master/packages/artifact) package is also used internally to handle most of the logic around uploading an artifact. There is extra documentation around upload limitations and behavior in the toolkit repo that is worth checking out.

### Conditional Artifact Upload

@@ -133,31 +104,6 @@ Each artifact behaves as a file share. Uploading to the same artifact multiple t
```
With the following example, the available artifact (named `artifact`, which is the default if no name is provided) would contain both `world.txt` (`hello`) and `extra-file.txt` (`howdy`).

> **_Warning:_** Be careful when uploading to the same artifact via multiple jobs as artifacts may become corrupted

```yaml
strategy:
  matrix:
    node-version: [8.x, 10.x, 12.x, 13.x]
steps:
  - name: 'Create a file'
    run: echo ${{ matrix.node-version }} > my_file.txt
  - name: 'Accidentally upload to the same artifact via multiple jobs'
    uses: 'actions/upload-artifact@v2'
    with:
      name: my-artifact
      path: ${{ github.workspace }}
```

In the above example, four jobs will upload four different files to the same artifact, but there will only be one file available when `my-artifact` is downloaded. Each job overwrites what was previously uploaded. To ensure that jobs don't overwrite existing artifacts, use a different name per job.

```yaml
uses: 'actions/upload-artifact@v2'
with:
  name: my-artifact ${{ matrix.node-version }}
  path: ${{ github.workspace }}
```

### Environment Variables and Tilde Expansion

You can use `~` in the path input as a substitute for `$HOME`. Basic tilde expansion is supported.
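
A short sketch of tilde expansion (the directory name is hypothetical): `~` is replaced with `$HOME` before the file search runs.

```yaml
- uses: actions/upload-artifact@v2
  with:
    name: my-artifact
    # ~/some-output expands to $HOME/some-output, e.g. /home/runner/some-output
    path: ~/some-output
```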

__tests__/search.test.ts

@@ -231,7 +231,7 @@ describe('Search', () => {

    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem5Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem1Path)).toEqual(
@@ -265,7 +265,7 @@ describe('Search', () => {

    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem5Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem1Path)).toEqual(
@@ -286,70 +286,4 @@ describe('Search', () => {

    expect(searchResult.rootDirectory).toEqual(root)
  })

  it('Multi path search - root directory', async () => {
    const searchPath1 = path.join(root, 'folder-a')
    const searchPath2 = path.join(root, 'folder-d')

    const searchPaths = searchPath1 + '\n' + searchPath2
    const searchResult = await findFilesToUpload(searchPaths)

    expect(searchResult.rootDirectory).toEqual(root)
    expect(searchResult.filesToUpload.length).toEqual(7)
    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem1Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem2Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraFileInFolderCPath)).toEqual(
      true
    )
  })

  it('Multi path search - with exclude character', async () => {
    const searchPath1 = path.join(root, 'folder-a')
    const searchPath2 = path.join(root, 'folder-d')
    const searchPath3 = path.join(root, 'folder-a', 'folder-b', '**/extra*.txt')

    // negating the third search path
    const searchPaths = searchPath1 + '\n' + searchPath2 + '\n!' + searchPath3
    const searchResult = await findFilesToUpload(searchPaths)

    expect(searchResult.rootDirectory).toEqual(root)
    expect(searchResult.filesToUpload.length).toEqual(5)
    expect(searchResult.filesToUpload.includes(searchItem1Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem2Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem3Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(searchItem4Path)).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem2Path)).toEqual(
      true
    )
  })

  it('Multi path search - non root directory', async () => {
    const searchPath1 = path.join(root, 'folder-h', 'folder-i')
    const searchPath2 = path.join(root, 'folder-h', 'folder-j', 'folder-k')
    const searchPath3 = amazingFileInFolderHPath

    const searchPaths = [searchPath1, searchPath2, searchPath3].join('\n')
    const searchResult = await findFilesToUpload(searchPaths)

    expect(searchResult.rootDirectory).toEqual(path.join(root, 'folder-h'))
    expect(searchResult.filesToUpload.length).toEqual(4)
    expect(
      searchResult.filesToUpload.includes(amazingFileInFolderHPath)
    ).toEqual(true)
    expect(searchResult.filesToUpload.includes(extraSearchItem4Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(extraSearchItem5Path)).toEqual(
      true
    )
    expect(searchResult.filesToUpload.includes(lonelyFilePath)).toEqual(true)
  })
})

244 dist/index.js (vendored)

@@ -2563,9 +2563,7 @@ class BasicCredentialHandler {
        this.password = password;
    }
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' +
                Buffer.from(this.username + ':' + this.password).toString('base64');
        options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
@@ -2601,8 +2599,7 @@ class PersonalAccessTokenCredentialHandler {
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
        options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
@@ -3787,6 +3784,7 @@ class DefaultArtifactClient {
        });
    }
    downloadArtifact(name, path, options) {
        var _a;
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const artifacts = yield downloadHttpClient.listArtifacts();
@@ -3806,7 +3804,7 @@ class DefaultArtifactClient {
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
            const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false);
            const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, ((_a = options) === null || _a === void 0 ? void 0 : _a.createArtifactFolder) || false);
            if (downloadSpecification.filesToDownload.length === 0) {
                core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
            }
@@ -4003,15 +4001,10 @@ function run() {
            core.debug(`Root artifact directory is ${searchResult.rootDirectory}`);
            const artifactClient = artifact_1.create();
            const options = {
                continueOnError: false
                continueOnError: true
            };
            const uploadResponse = yield artifactClient.uploadArtifact(name || constants_1.getDefaultArtifactName(), searchResult.filesToUpload, searchResult.rootDirectory, options);
            if (uploadResponse.failedItems.length > 0) {
                core.setFailed(`An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`);
            }
            else {
                core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`);
            }
            yield artifactClient.uploadArtifact(name || constants_1.getDefaultArtifactName(), searchResult.filesToUpload, searchResult.rootDirectory, options);
            core.info('Artifact upload has finished successfully!');
        }
    }
    catch (err) {
@@ -5347,18 +5340,8 @@ function getProxyUrl(serverUrl) {
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
@@ -5483,22 +5466,18 @@ class HttpClient {
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
            throw new Error("Client has already been disposed.");
        }
        let parsedUrl = url.parse(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
            if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
@@ -5516,32 +5495,21 @@ class HttpClient {
            }
        }
        let redirectsRemaining = this._maxRedirects;
        while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
            this._allowRedirects &&
            redirectsRemaining > 0) {
            const redirectUrl = response.message.headers['location'];
        while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
            && this._allowRedirects
            && redirectsRemaining > 0) {
            const redirectUrl = response.message.headers["location"];
            if (!redirectUrl) {
                // if there's no location to redirect to, we won't
                break;
            }
            let parsedRedirectUrl = url.parse(redirectUrl);
            if (parsedUrl.protocol == 'https:' &&
                parsedUrl.protocol != parsedRedirectUrl.protocol &&
                !this._allowRedirectDowngrade) {
                throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
            if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
                throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
            }
            // we need to finish reading the response before reassigning response
            // which will leak the open socket.
            await response.readBody();
            // strip authorization header if redirected to a different hostname
            if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                for (let header in headers) {
                    // header names are case insensitive
                    if (header.toLowerCase() === 'authorization') {
                        delete headers[header];
                    }
                }
            }
            // let's make the request with the new redirectUrl
            info = this._prepareRequest(verb, parsedRedirectUrl, headers);
            response = await this.requestRaw(info, data);
@@ -5592,8 +5560,8 @@ class HttpClient {
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        if (typeof (data) === 'string') {
            info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        let handleResult = (err, res) => {
@@ -5606,7 +5574,7 @@ class HttpClient {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
        req.on('socket', (sock) => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
@@ -5621,10 +5589,10 @@ class HttpClient {
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
        if (data && typeof (data) === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
        if (data && typeof (data) !== 'string') {
            data.on('close', function () {
                req.end();
            });
@@ -5651,34 +5619,31 @@ class HttpClient {
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
        info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
            info.options.headers["user-agent"] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach(handler => {
            this.handlers.forEach((handler) => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
@@ -5716,7 +5681,7 @@ class HttpClient {
                    proxyAuth: proxyUrl.auth,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            },
        };
        let tunnelAgent;
        const overHttps = proxyUrl.protocol === 'https:';
@@ -5743,9 +5708,7 @@ class HttpClient {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
            agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
        }
        return agent;
    }
@@ -5806,7 +5769,7 @@ class HttpClient {
                msg = contents;
            }
            else {
                msg = 'Failed request: (' + statusCode + ')';
                msg = "Failed request: (" + statusCode + ")";
            }
            let err = new Error(msg);
            // attach statusCode and body obj (if available) to the error object
@@ -6221,7 +6184,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const glob = __importStar(__webpack_require__(281));
const path = __importStar(__webpack_require__(622));
const core_1 = __webpack_require__(470);
const fs_1 = __webpack_require__(747);
const path_1 = __webpack_require__(622);
@@ -6232,57 +6194,6 @@ function getDefaultGlobOptions() {
        omitBrokenSymbolicLinks: true
    };
}
/**
 * If multiple paths are specified, the least common ancestor (LCA) of the search paths is used as
 * the delimiter to control the directory structure for the artifact. This function returns the LCA
 * when given an array of search paths
 *
 * Example 1: The patterns `/foo/` and `/bar/` returns `/`
 *
 * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo`
 */
function getMultiPathLCA(searchPaths) {
    if (searchPaths.length < 2) {
        throw new Error('At least two search paths must be provided');
    }
    const commonPaths = new Array();
    const splitPaths = new Array();
    let smallestPathLength = Number.MAX_SAFE_INTEGER;
    // split each of the search paths using the platform specific separator
    for (const searchPath of searchPaths) {
        core_1.debug(`Using search path ${searchPath}`);
        const splitSearchPath = path.normalize(searchPath).split(path.sep);
        // keep track of the smallest path length so that we don't accidentally later go out of bounds
        smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length);
        splitPaths.push(splitSearchPath);
    }
    // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
    if (searchPaths[0].startsWith(path.sep)) {
        commonPaths.push(path.sep);
    }
    let splitIndex = 0;
    // function to check if the paths are the same at a specific index
    function isPathTheSame() {
        const compare = splitPaths[0][splitIndex];
        for (let i = 1; i < splitPaths.length; i++) {
            if (compare !== splitPaths[i][splitIndex]) {
                // a non-common index has been reached
                return false;
            }
        }
        return true;
    }
    // Loop over all the search paths until there is a non-common ancestor or we go out of bounds
    while (splitIndex < smallestPathLength) {
        if (!isPathTheSame()) {
            break;
        }
        // if all are the same, add to the end result & increment the index
        commonPaths.push(splitPaths[0][splitIndex]);
        splitIndex++;
    }
    return path.join(...commonPaths);
}
function findFilesToUpload(searchPath, globOptions) {
    return __awaiter(this, void 0, void 0, function* () {
        const searchResults = [];
@@ -6301,16 +6212,13 @@ function findFilesToUpload(searchPath, globOptions) {
                core_1.debug(`Removing ${searchResult} from rawSearchResults because it is a directory`);
            }
        }
        // Calculate the root directory for the artifact using the search paths that were utilized
        /*
          Only a single search pattern is being included so only 1 searchResult is expected. In the future if multiple search patterns are
          simultaneously supported this will change
        */
        const searchPaths = globber.getSearchPaths();
        if (searchPaths.length > 1) {
            core_1.info(`Multiple search paths detected. Calculating the least common ancestor of all paths`);
            const lcaSearchPath = getMultiPathLCA(searchPaths);
            core_1.info(`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`);
            return {
                filesToUpload: searchResults,
                rootDirectory: lcaSearchPath
            };
            throw new Error('Only 1 search path should be returned');
        }
        /*
          Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is
@@ -6608,8 +6516,8 @@ class UploadHttpClient {
            const artifactUrl = utils_1.getArtifactUrl();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.uploadHttpManager.getClient(0);
            const headers = utils_1.getUploadHeaders('application/json', false);
            const rawResponse = yield client.post(artifactUrl, data, headers);
            const requestOptions = utils_1.getUploadRequestOptions('application/json', false);
            const rawResponse = yield client.post(artifactUrl, data, requestOptions);
            const body = yield rawResponse.readBody();
            if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
                return JSON.parse(body);
@@ -6721,25 +6629,21 @@ class UploadHttpClient {
                // for creating a new GZip file, an in-memory buffer is used for compression
                if (totalFileSize < 65536) {
                    const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file);
                    // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
                    // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
                    let openUploadStream;
                    let uploadStream;
                    if (totalFileSize < buffer.byteLength) {
                        // compression did not help with reducing the size, use a readable stream from the original file for upload
                        openUploadStream = () => fs.createReadStream(parameters.file);
                        uploadStream = fs.createReadStream(parameters.file);
                        isGzip = false;
                        uploadFileSize = totalFileSize;
                    }
                    else {
                        // create a readable stream using a PassThrough stream that is both readable and writable
                        openUploadStream = () => {
                            const passThrough = new stream.PassThrough();
                            passThrough.end(buffer);
                            return passThrough;
                        };
                        const passThrough = new stream.PassThrough();
                        passThrough.end(buffer);
                        uploadStream = passThrough;
                        uploadFileSize = buffer.byteLength;
                    }
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, uploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
                    if (!result) {
                        // chunk failed to upload
                        isUploadSuccessful = false;
@@ -6781,7 +6685,7 @@ class UploadHttpClient {
                        failedChunkSizes += chunkSize;
                        continue;
                    }
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, {
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, fs.createReadStream(uploadFilePath, {
                        start,
                        end,
                        autoClose: false
@@ -6811,7 +6715,7 @@ class UploadHttpClient {
     * indicates a retryable status, we try to upload the chunk as well
     * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
     * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
     * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded
     * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
     * @param {number} start Starting byte index of file that the chunk belongs to
     * @param {number} end Ending byte index of file that the chunk belongs to
     * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
@@ -6819,13 +6723,13 @@ class UploadHttpClient {
     * @param {number} totalFileSize Original total size of the file that is being uploaded
     * @returns if the chunk was successfully uploaded
     */
    uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) {
    uploadChunk(httpClientIndex, resourceUrl, data, start, end, uploadFileSize, isGzip, totalFileSize) {
        return __awaiter(this, void 0, void 0, function* () {
            // prepare all the necessary headers before making any http call
            const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize));
            const requestOptions = utils_1.getUploadRequestOptions('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize));
            const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
                const client = this.uploadHttpManager.getClient(httpClientIndex);
                return yield client.sendStream('PUT', resourceUrl, openStream(), headers);
                return yield client.sendStream('PUT', resourceUrl, data, requestOptions);
            });
            let retryCount = 0;
            const retryLimit = config_variables_1.getRetryLimit();
@@ -6903,7 +6807,7 @@ class UploadHttpClient {
     */
    patchArtifactSize(size, artifactName) {
        return __awaiter(this, void 0, void 0, function* () {
            const headers = utils_1.getUploadHeaders('application/json', false);
            const requestOptions = utils_1.getUploadRequestOptions('application/json', false);
            const resourceUrl = new url_1.URL(utils_1.getArtifactUrl());
            resourceUrl.searchParams.append('artifactName', artifactName);
            const parameters = { Size: size };
@@ -6911,7 +6815,7 @@ class UploadHttpClient {
            core.debug(`URL is ${resourceUrl.toString()}`);
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.uploadHttpManager.getClient(0);
            const response = yield client.patch(resourceUrl.toString(), data, headers);
            const response = yield client.patch(resourceUrl.toString(), data, requestOptions);
            const body = yield response.readBody();
            if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
                core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
@@ -7344,8 +7248,8 @@ class DownloadHttpClient {
            const artifactUrl = utils_1.getArtifactUrl();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = utils_1.getDownloadHeaders('application/json');
            const response = yield client.get(artifactUrl, headers);
            const requestOptions = utils_1.getDownloadRequestOptions('application/json');
            const response = yield client.get(artifactUrl, requestOptions);
            const body = yield response.readBody();
            if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
                return JSON.parse(body);
@@ -7366,8 +7270,8 @@ class DownloadHttpClient {
            resourceUrl.searchParams.append('itemPath', artifactName);
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = utils_1.getDownloadHeaders('application/json');
            const response = yield client.get(resourceUrl.toString(), headers);
            const requestOptions = utils_1.getDownloadRequestOptions('application/json');
            const response = yield client.get(resourceUrl.toString(), requestOptions);
            const body = yield response.readBody();
            if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
                return JSON.parse(body);
@@ -7424,16 +7328,15 @@ class DownloadHttpClient {
            let retryCount = 0;
            const retryLimit = config_variables_1.getRetryLimit();
            const destinationStream = fs.createWriteStream(downloadPath);
            const headers = utils_1.getDownloadHeaders('application/json', true, true);
            const requestOptions = utils_1.getDownloadRequestOptions('application/json', true, true);
            // a single GET request is used to download a file
            const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () {
                const client = this.downloadHttpManager.getClient(httpClientIndex);
                return yield client.get(artifactLocation, headers);
                return yield client.get(artifactLocation, requestOptions);
            });
            // check the response headers to determine if the file was compressed using gzip
            const isGzip = (incomingHeaders) => {
                return ('content-encoding' in incomingHeaders &&
                    incomingHeaders['content-encoding'] === 'gzip');
            const isGzip = (headers) => {
                return ('content-encoding' in headers && headers['content-encoding'] === 'gzip');
            };
            // Increments the current retry count and then checks if the retry limit has been reached
            // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided,
@@ -7909,9 +7812,9 @@ exports.getContentRange = getContentRange;
 * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
 * @param {boolean} acceptGzip can we accept a gzip encoded response
 * @param {string} acceptType the type of content that we can accept
 * @returns appropriate headers to make a specific http call during artifact download
 * @returns appropriate request options to make a specific http call during artifact download
 */
function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) {
function getDownloadRequestOptions(contentType, isKeepAlive, acceptGzip) {
    const requestOptions = {};
    if (contentType) {
        requestOptions['Content-Type'] = contentType;
@@ -7932,7 +7835,7 @@ function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) {
    }
    return requestOptions;
}
exports.getDownloadHeaders = getDownloadHeaders;
exports.getDownloadRequestOptions = getDownloadRequestOptions;
/**
 * Sets all the necessary headers when uploading an artifact
 * @param {string} contentType the type of content being uploaded
@@ -7941,9 +7844,9 @@ exports.getDownloadHeaders = getDownloadHeaders;
 * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
 * @param {number} contentLength the length of the content that is being uploaded
 * @param {string} contentRange the range of the content that is being uploaded
 * @returns appropriate headers to make a specific http call during artifact upload
 * @returns appropriate request options to make a specific http call during artifact upload
 */
function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) {
function getUploadRequestOptions(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) {
    const requestOptions = {};
    requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`;
    if (contentType) {
@@ -7966,9 +7869,9 @@ function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength,
    }
    return requestOptions;
}
exports.getUploadHeaders = getUploadHeaders;
exports.getUploadRequestOptions = getUploadRequestOptions;
function createHttpClient() {
    return new http_client_1.HttpClient('actions/artifact', [
    return new http_client_1.HttpClient('action/artifact', [
        new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken())
    ]);
}
@@ -8388,10 +8291,12 @@ function getProxyUrl(reqUrl) {
    }
    let proxyVar;
    if (usingSsl) {
        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
        proxyVar = process.env["https_proxy"] ||
            process.env["HTTPS_PROXY"];
    }
    else {
        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
        proxyVar = process.env["http_proxy"] ||
            process.env["HTTP_PROXY"];
    }
    if (proxyVar) {
        proxyUrl = url.parse(proxyVar);
@@ -8403,7 +8308,7 @@ function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
    if (!noProxy) {
        return false;
    }
@@ -8424,10 +8329,7 @@ function checkBypass(reqUrl) {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (let upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
    for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
            return true;
        }

4494 package-lock.json (generated)

File diff suppressed because it is too large

10 package.json

@@ -1,6 +1,6 @@
{
  "name": "upload-artifact",
  "version": "2.0.1",
  "version": "2.0.0",
  "description": "Upload a build artifact that can be used by subsequent workflow steps",
  "main": "dist/index.js",
  "scripts": {
@@ -29,7 +29,7 @@
  },
  "homepage": "https://github.com/actions/upload-artifact#readme",
  "devDependencies": {
    "@actions/artifact": "^0.3.2",
    "@actions/artifact": "^0.3.1",
    "@actions/core": "^1.2.3",
    "@actions/glob": "^0.1.0",
    "@actions/io": "^1.0.2",
@@ -38,12 +38,12 @@
    "@typescript-eslint/parser": "^2.27.0",
    "@zeit/ncc": "^0.22.1",
    "concurrently": "^5.1.0",
    "eslint": "^7.4.0",
    "eslint": "^6.8.0",
    "eslint-plugin-github": "^3.4.1",
    "eslint-plugin-jest": "^23.8.2",
    "glob": "^7.1.6",
    "jest": "^26.1.0",
    "jest-circus": "^26.1.0",
    "jest": "^25.3.0",
    "jest-circus": "^25.3.0",
    "prettier": "^2.0.4",
    "ts-jest": "^25.3.1",
    "typescript": "^3.8.3"

src/search.ts

@@ -1,6 +1,5 @@
import * as glob from '@actions/glob'
import * as path from 'path'
import {debug, info} from '@actions/core'
import {debug} from '@actions/core'
import {lstatSync} from 'fs'
import {dirname} from 'path'

@@ -17,65 +16,6 @@ function getDefaultGlobOptions(): glob.GlobOptions {
  }
}

/**
 * If multiple paths are specified, the least common ancestor (LCA) of the search paths is used as
 * the delimiter to control the directory structure for the artifact. This function returns the LCA
 * when given an array of search paths
 *
 * Example 1: The patterns `/foo/` and `/bar/` returns `/`
 *
 * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo`
 */
function getMultiPathLCA(searchPaths: string[]): string {
  if (searchPaths.length < 2) {
    throw new Error('At least two search paths must be provided')
  }

  const commonPaths = new Array<string>()
  const splitPaths = new Array<string[]>()
  let smallestPathLength = Number.MAX_SAFE_INTEGER

  // split each of the search paths using the platform specific separator
  for (const searchPath of searchPaths) {
    debug(`Using search path ${searchPath}`)

    const splitSearchPath = path.normalize(searchPath).split(path.sep)

    // keep track of the smallest path length so that we don't accidentally later go out of bounds
    smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length)
    splitPaths.push(splitSearchPath)
  }

  // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it
  if (searchPaths[0].startsWith(path.sep)) {
    commonPaths.push(path.sep)
  }

  let splitIndex = 0
  // function to check if the paths are the same at a specific index
  function isPathTheSame(): boolean {
    const compare = splitPaths[0][splitIndex]
    for (let i = 1; i < splitPaths.length; i++) {
      if (compare !== splitPaths[i][splitIndex]) {
        // a non-common index has been reached
        return false
      }
    }
    return true
  }

  // Loop over all the search paths until there is a non-common ancestor or we go out of bounds
  while (splitIndex < smallestPathLength) {
    if (!isPathTheSame()) {
      break
    }
    // if all are the same, add to the end result & increment the index
    commonPaths.push(splitPaths[0][splitIndex])
    splitIndex++
  }
  return path.join(...commonPaths)
}

export async function findFilesToUpload(
  searchPath: string,
  globOptions?: glob.GlobOptions
@@ -102,22 +42,13 @@ export async function findFilesToUpload(
    }
  }

  // Calculate the root directory for the artifact using the search paths that were utilized
  /*
    Only a single search pattern is being included so only 1 searchResult is expected. In the future if multiple search patterns are
    simultaneously supported this will change
  */
  const searchPaths: string[] = globber.getSearchPaths()

  if (searchPaths.length > 1) {
    info(
      `Multiple search paths detected. Calculating the least common ancestor of all paths`
    )
    const lcaSearchPath = getMultiPathLCA(searchPaths)
    info(
      `The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`
    )

    return {
      filesToUpload: searchResults,
      rootDirectory: lcaSearchPath
    }
    throw new Error('Only 1 search path should be returned')
  }

  /*

src/upload-artifact.ts

@@ -21,24 +21,16 @@ async function run(): Promise<void> {

    const artifactClient = create()
    const options: UploadOptions = {
      continueOnError: false
      continueOnError: true
    }
    const uploadResponse = await artifactClient.uploadArtifact(
    await artifactClient.uploadArtifact(
      name || getDefaultArtifactName(),
      searchResult.filesToUpload,
      searchResult.rootDirectory,
      options
    )

    if (uploadResponse.failedItems.length > 0) {
      core.setFailed(
        `An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`
      )
    } else {
      core.info(
        `Artifact ${uploadResponse.artifactName} has been successfully uploaded!`
      )
    }
    core.info('Artifact upload has finished successfully!')
    }
  } catch (err) {
    core.setFailed(err.message)