Mirror of https://github.com/actions/upload-artifact.git (synced 2025-06-15 08:17:13 +02:00)
Compare commits: v2.0.1 ... v2-preview (12 commits)

Commits: 0aad9dab0a, cb22e2637a, 827c1aa5e0, 342dfd841f, 49f93b55a2, f72ed18289, f42ee54997, 85bdb8f4a9, 40fe78158e, 826ad0a00b, f0ad69f4df, c9be818b8a
.github/ISSUE_TEMPLATE/bug_report.md (vendored): 33 changed lines
@@ -1,33 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**Version**
- [ ] V1
- [ ] V2

**Environment**
- [ ] self-hosted
- [ ] Linux
- [ ] Windows
- [ ] Mac

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Run/Repo Url**
If applicable, and if your repo/run is public, please include a URL so it is easier for us to investigate.

**How to reproduce**
If applicable, add information on how to reproduce the problem.

**Additional context**
Add any other context about the problem here.
.github/workflows/codeql-analysis.yml (vendored): 44 changed lines
@@ -1,44 +0,0 @@
name: "Code scanning - action"

on:
  push:
    paths-ignore:
      - '**.md'
  schedule:
    - cron: '0 6 * * 3'

jobs:
  CodeQL-Build:

    # CodeQL runs on ubuntu-latest and windows-latest
    runs-on: ubuntu-latest

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      # Override language selection by uncommenting this and choosing your languages
      # with:
      #   languages: go, javascript, csharp, python, cpp, java

    # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v1

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

    # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
    #    and modify them (or add more) to build your code if your project
    #    uses a compiled language

    #- run: |
    #   make bootstrap
    #   make release

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
dist/index.js (vendored): 179 changed lines
@@ -2563,9 +2563,7 @@ class BasicCredentialHandler {
        this.password = password;
    }
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' +
                Buffer.from(this.username + ':' + this.password).toString('base64');
        options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
@@ -2601,8 +2599,7 @@ class PersonalAccessTokenCredentialHandler {
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        options.headers['Authorization'] =
            'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
        options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication(response) {
@@ -3787,6 +3784,7 @@ class DefaultArtifactClient {
        });
    }
    downloadArtifact(name, path, options) {
        var _a;
        return __awaiter(this, void 0, void 0, function* () {
            const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
            const artifacts = yield downloadHttpClient.listArtifacts();
@@ -3806,7 +3804,7 @@ class DefaultArtifactClient {
            path = path_1.normalize(path);
            path = path_1.resolve(path);
            // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
            const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false);
            const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, ((_a = options) === null || _a === void 0 ? void 0 : _a.createArtifactFolder) || false);
            if (downloadSpecification.filesToDownload.length === 0) {
                core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
            }
@@ -4003,15 +4001,10 @@ function run() {
            core.debug(`Root artifact directory is ${searchResult.rootDirectory}`);
            const artifactClient = artifact_1.create();
            const options = {
                continueOnError: false
                continueOnError: true
            };
            const uploadResponse = yield artifactClient.uploadArtifact(name || constants_1.getDefaultArtifactName(), searchResult.filesToUpload, searchResult.rootDirectory, options);
            if (uploadResponse.failedItems.length > 0) {
                core.setFailed(`An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`);
            }
            else {
                core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`);
            }
            yield artifactClient.uploadArtifact(name || constants_1.getDefaultArtifactName(), searchResult.filesToUpload, searchResult.rootDirectory, options);
            core.info('Artifact upload has finished successfully!');
        }
    }
    catch (err) {
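The run() hunk above is where the two tags diverge most visibly: v2.0.1 sets continueOnError: false and inspects the UploadResponse returned by @actions/artifact 0.3.x, while v2-preview passed continueOnError: true and only logged completion. A minimal standalone sketch of the v2.0.1-style handling follows; the artifact name and file list are placeholders, not values from the action itself.

```typescript
import * as core from '@actions/core';
import {create, UploadOptions} from '@actions/artifact';

async function uploadWithFailureCheck(): Promise<void> {
  const artifactClient = create();
  const options: UploadOptions = {
    // fail fast instead of silently tolerating files that error out
    continueOnError: false
  };
  // hypothetical inputs; the real action derives these from its file search step
  const files = ['dist/output.txt'];
  const rootDirectory = 'dist';

  const uploadResponse = await artifactClient.uploadArtifact(
    'my-artifact',
    files,
    rootDirectory,
    options
  );

  if (uploadResponse.failedItems.length > 0) {
    // any file that did not upload marks the whole step as failed
    core.setFailed(
      `${uploadResponse.failedItems.length} items failed to upload for ${uploadResponse.artifactName}`
    );
  } else {
    core.info(`Artifact ${uploadResponse.artifactName} uploaded successfully`);
  }
}
```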
@@ -5347,18 +5340,8 @@ function getProxyUrl(serverUrl) {
    return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
    HttpCodes.MovedPermanently,
    HttpCodes.ResourceMoved,
    HttpCodes.SeeOther,
    HttpCodes.TemporaryRedirect,
    HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
    HttpCodes.BadGateway,
    HttpCodes.ServiceUnavailable,
    HttpCodes.GatewayTimeout
];
const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect];
const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
@@ -5483,22 +5466,18 @@ class HttpClient {
     */
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
            throw new Error("Client has already been disposed.");
        }
        let parsedUrl = url.parse(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
            if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
@@ -5516,32 +5495,21 @@ class HttpClient {
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1
                && this._allowRedirects
                && redirectsRemaining > 0) {
                const redirectUrl = response.message.headers["location"];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = url.parse(redirectUrl);
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) {
                    throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.");
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
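Both versions of HttpClient.request shown above enforce the same policy: redirects are followed only while _allowRedirects is true and redirectsRemaining is positive, an HTTPS-to-HTTP hop throws unless _allowRedirectDowngrade is set, and the Authorization header is dropped when the redirect changes hostname. A hedged sketch of how a caller would opt into these knobs; the option names mirror the private fields used above, but treat the exact shape as an assumption about @actions/http-client 1.0.x.

```typescript
import {HttpClient} from '@actions/http-client';

// assumed option names, mirroring the fields referenced in request():
// _allowRedirects, _maxRedirects, _allowRedirectDowngrade, _allowRetries, _maxRetries
const client = new HttpClient('upload-artifact-example', [], {
  allowRedirects: true,
  maxRedirects: 5,
  allowRedirectDowngrade: false, // keep the HTTPS -> HTTP guard from the code above
  allowRetries: true,            // retries apply only to OPTIONS/GET/DELETE/HEAD
  maxRetries: 3
});

async function fetchStatus(url: string): Promise<number | undefined> {
  const response = await client.get(url);
  return response.message.statusCode;
}
```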
@@ -5592,8 +5560,8 @@ class HttpClient {
     */
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        if (typeof (data) === 'string') {
            info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        let handleResult = (err, res) => {
@@ -5606,7 +5574,7 @@ class HttpClient {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
        req.on('socket', (sock) => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
@@ -5621,10 +5589,10 @@ class HttpClient {
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
        if (data && typeof (data) === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
        if (data && typeof (data) !== 'string') {
            data.on('close', function () {
                req.end();
            });
@@ -5651,34 +5619,31 @@ class HttpClient {
        const defaultPort = usingSsl ? 443 : 80;
        info.options = {};
        info.options.host = info.parsedUrl.hostname;
        info.options.port = info.parsedUrl.port
            ? parseInt(info.parsedUrl.port)
            : defaultPort;
        info.options.path =
            (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort;
        info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
        info.options.method = method;
        info.options.headers = this._mergeHeaders(headers);
        if (this.userAgent != null) {
            info.options.headers['user-agent'] = this.userAgent;
            info.options.headers["user-agent"] = this.userAgent;
        }
        info.options.agent = this._getAgent(info.parsedUrl);
        // gives handlers an opportunity to participate
        if (this.handlers) {
            this.handlers.forEach(handler => {
            this.handlers.forEach((handler) => {
                handler.prepareRequest(info.options);
            });
        }
        return info;
    }
    _mergeHeaders(headers) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        if (this.requestOptions && this.requestOptions.headers) {
            return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
        }
        return lowercaseKeys(headers || {});
    }
    _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
        const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {});
        let clientHeader;
        if (this.requestOptions && this.requestOptions.headers) {
            clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
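The only change in _mergeHeaders and _getExistingOrDefaultHeader is how the lowercaseKeys reducer is parenthesized; the behavior is identical. As a quick illustration of what that reducer does, here is a standalone re-implementation (not part of the library) showing that per-request headers win over client defaults once both sides are normalized to lowercase keys.

```typescript
// Re-implementation of the lowercaseKeys helper from the hunk above,
// shown on its own so the merge order is easy to see.
const lowercaseKeys = (obj: Record<string, string>): Record<string, string> =>
  Object.keys(obj).reduce((c: Record<string, string>, k) => {
    c[k.toLowerCase()] = obj[k];
    return c;
  }, {});

const clientDefaults = {'User-Agent': 'actions/artifact', Accept: 'application/json'};
const perRequest = {'user-agent': 'custom-agent'};

// Later keys win, so the per-request header overrides the client default.
const merged = {...lowercaseKeys(clientDefaults), ...lowercaseKeys(perRequest)};
console.log(merged); // { 'user-agent': 'custom-agent', accept: 'application/json' }
```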
@@ -5716,7 +5681,7 @@ class HttpClient {
                    proxyAuth: proxyUrl.auth,
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
                },
            };
            let tunnelAgent;
            const overHttps = proxyUrl.protocol === 'https:';
@@ -5743,9 +5708,7 @@ class HttpClient {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
            agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false });
        }
        return agent;
    }
@@ -5806,7 +5769,7 @@ class HttpClient {
                msg = contents;
            }
            else {
                msg = 'Failed request: (' + statusCode + ')';
                msg = "Failed request: (" + statusCode + ")";
            }
            let err = new Error(msg);
            // attach statusCode and body obj (if available) to the error object
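The rejectUnauthorized hunk above only collapses an Object.assign call onto one line; disabling TLS verification on the agent works the same way in both tags. Callers reach that path through the client's ignoreSslError option; a minimal sketch, with the option name taken as an assumption about the public surface of @actions/http-client 1.0.x.

```typescript
import {HttpClient} from '@actions/http-client';

// assumption: ignoreSslError is the public option that leads to
// rejectUnauthorized: false being set on the underlying agent.
const insecureClient = new HttpClient('example', [], {ignoreSslError: true});
```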
@@ -6553,8 +6516,8 @@ class UploadHttpClient {
            const artifactUrl = utils_1.getArtifactUrl();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.uploadHttpManager.getClient(0);
            const headers = utils_1.getUploadHeaders('application/json', false);
            const rawResponse = yield client.post(artifactUrl, data, headers);
            const requestOptions = utils_1.getUploadRequestOptions('application/json', false);
            const rawResponse = yield client.post(artifactUrl, data, requestOptions);
            const body = yield rawResponse.readBody();
            if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) {
                return JSON.parse(body);
@@ -6666,25 +6629,21 @@ class UploadHttpClient {
                // for creating a new GZip file, an in-memory buffer is used for compression
                if (totalFileSize < 65536) {
                    const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file);
                    //An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
                    // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
                    let openUploadStream;
                    let uploadStream;
                    if (totalFileSize < buffer.byteLength) {
                        // compression did not help with reducing the size, use a readable stream from the original file for upload
                        openUploadStream = () => fs.createReadStream(parameters.file);
                        uploadStream = fs.createReadStream(parameters.file);
                        isGzip = false;
                        uploadFileSize = totalFileSize;
                    }
                    else {
                        // create a readable stream using a PassThrough stream that is both readable and writable
                        openUploadStream = () => {
                            const passThrough = new stream.PassThrough();
                            passThrough.end(buffer);
                            return passThrough;
                        };
                        const passThrough = new stream.PassThrough();
                        passThrough.end(buffer);
                        uploadStream = passThrough;
                        uploadFileSize = buffer.byteLength;
                    }
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, uploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
                    if (!result) {
                        // chunk failed to upload
                        isUploadSuccessful = false;
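The comment on the v2.0.1 side of this hunk explains the reason for the openUploadStream indirection: a NodeJS.ReadableStream that has already been consumed cannot be rewound, so a retried chunk needs a factory that produces a fresh stream for each attempt. A small sketch of that pattern outside the library; sendChunk is a stand-in for the real upload call and 'some-file.txt' is a placeholder path.

```typescript
import * as fs from 'fs';
import * as stream from 'stream';

// Stand-in for the real chunk upload; fails on the first attempt to force a retry.
let attempts = 0;
async function sendChunk(body: NodeJS.ReadableStream): Promise<boolean> {
  attempts++;
  body.resume(); // drain the stream, as an HTTP request would
  return attempts > 1;
}

async function uploadWithRetry(openStream: () => NodeJS.ReadableStream): Promise<void> {
  for (let retry = 0; retry < 2; retry++) {
    // a fresh stream per attempt, which is exactly why the factory exists
    if (await sendChunk(openStream())) {
      return;
    }
  }
  throw new Error('chunk failed after retries');
}

// file-backed factory (mirrors fs.createReadStream(parameters.file) above)
const fromFile = () => fs.createReadStream('some-file.txt');

// buffer-backed factory (mirrors the PassThrough branch above)
const fromBuffer = (buffer: Buffer) => () => {
  const passThrough = new stream.PassThrough();
  passThrough.end(buffer);
  return passThrough;
};

uploadWithRetry(fromBuffer(Buffer.from('gzipped bytes'))).catch(err => console.error(err));
```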
@@ -6726,7 +6685,7 @@ class UploadHttpClient {
                        failedChunkSizes += chunkSize;
                        continue;
                    }
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, {
                    const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, fs.createReadStream(uploadFilePath, {
                        start,
                        end,
                        autoClose: false
@@ -6756,7 +6715,7 @@ class UploadHttpClient {
     * indicates a retryable status, we try to upload the chunk as well
     * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
     * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
     * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded
     * @param {NodeJS.ReadableStream} data Stream of the file that will be uploaded
     * @param {number} start Starting byte index of file that the chunk belongs to
     * @param {number} end Ending byte index of file that the chunk belongs to
     * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
@@ -6764,13 +6723,13 @@ class UploadHttpClient {
     * @param {number} totalFileSize Original total size of the file that is being uploaded
     * @returns if the chunk was successfully uploaded
     */
    uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) {
    uploadChunk(httpClientIndex, resourceUrl, data, start, end, uploadFileSize, isGzip, totalFileSize) {
        return __awaiter(this, void 0, void 0, function* () {
            // prepare all the necessary headers before making any http call
            const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize));
            const requestOptions = utils_1.getUploadRequestOptions('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize));
            const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
                const client = this.uploadHttpManager.getClient(httpClientIndex);
                return yield client.sendStream('PUT', resourceUrl, openStream(), headers);
                return yield client.sendStream('PUT', resourceUrl, data, requestOptions);
            });
            let retryCount = 0;
            const retryLimit = config_variables_1.getRetryLimit();
@@ -6848,7 +6807,7 @@ class UploadHttpClient {
     */
    patchArtifactSize(size, artifactName) {
        return __awaiter(this, void 0, void 0, function* () {
            const headers = utils_1.getUploadHeaders('application/json', false);
            const requestOptions = utils_1.getUploadRequestOptions('application/json', false);
            const resourceUrl = new url_1.URL(utils_1.getArtifactUrl());
            resourceUrl.searchParams.append('artifactName', artifactName);
            const parameters = { Size: size };
@@ -6856,7 +6815,7 @@ class UploadHttpClient {
            core.debug(`URL is ${resourceUrl.toString()}`);
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.uploadHttpManager.getClient(0);
            const response = yield client.patch(resourceUrl.toString(), data, headers);
            const response = yield client.patch(resourceUrl.toString(), data, requestOptions);
            const body = yield response.readBody();
            if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
                core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
@@ -7289,8 +7248,8 @@ class DownloadHttpClient {
            const artifactUrl = utils_1.getArtifactUrl();
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = utils_1.getDownloadHeaders('application/json');
            const response = yield client.get(artifactUrl, headers);
            const requestOptions = utils_1.getDownloadRequestOptions('application/json');
            const response = yield client.get(artifactUrl, requestOptions);
            const body = yield response.readBody();
            if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
                return JSON.parse(body);
@@ -7311,8 +7270,8 @@ class DownloadHttpClient {
            resourceUrl.searchParams.append('itemPath', artifactName);
            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
            const client = this.downloadHttpManager.getClient(0);
            const headers = utils_1.getDownloadHeaders('application/json');
            const response = yield client.get(resourceUrl.toString(), headers);
            const requestOptions = utils_1.getDownloadRequestOptions('application/json');
            const response = yield client.get(resourceUrl.toString(), requestOptions);
            const body = yield response.readBody();
            if (utils_1.isSuccessStatusCode(response.message.statusCode) && body) {
                return JSON.parse(body);
@@ -7369,16 +7328,15 @@ class DownloadHttpClient {
            let retryCount = 0;
            const retryLimit = config_variables_1.getRetryLimit();
            const destinationStream = fs.createWriteStream(downloadPath);
            const headers = utils_1.getDownloadHeaders('application/json', true, true);
            const requestOptions = utils_1.getDownloadRequestOptions('application/json', true, true);
            // a single GET request is used to download a file
            const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () {
                const client = this.downloadHttpManager.getClient(httpClientIndex);
                return yield client.get(artifactLocation, headers);
                return yield client.get(artifactLocation, requestOptions);
            });
            // check the response headers to determine if the file was compressed using gzip
            const isGzip = (incomingHeaders) => {
                return ('content-encoding' in incomingHeaders &&
                    incomingHeaders['content-encoding'] === 'gzip');
            const isGzip = (headers) => {
                return ('content-encoding' in headers && headers['content-encoding'] === 'gzip');
            };
            // Increments the current retry count and then checks if the retry limit has been reached
            // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided,
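The isGzip helper above only inspects the content-encoding response header; the surrounding download code (not part of this hunk) then has to decompress the body before writing it to destinationStream. A hedged sketch of that consumption pattern, assuming the usual zlib gunzip pipe rather than the library's exact implementation.

```typescript
import * as fs from 'fs';
import * as zlib from 'zlib';
import type {IncomingHttpHeaders} from 'http';

const isGzip = (headers: IncomingHttpHeaders): boolean =>
  'content-encoding' in headers && headers['content-encoding'] === 'gzip';

// message is the underlying IncomingMessage: a readable stream carrying .headers
function saveBody(
  message: NodeJS.ReadableStream & {headers: IncomingHttpHeaders},
  downloadPath: string
): void {
  const destinationStream = fs.createWriteStream(downloadPath);
  if (isGzip(message.headers)) {
    // assumption: gzip-encoded bodies are piped through gunzip before hitting disk
    message.pipe(zlib.createGunzip()).pipe(destinationStream);
  } else {
    message.pipe(destinationStream);
  }
}
```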
@@ -7854,9 +7812,9 @@ exports.getContentRange = getContentRange;
 * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
 * @param {boolean} acceptGzip can we accept a gzip encoded response
 * @param {string} acceptType the type of content that we can accept
 * @returns appropriate headers to make a specific http call during artifact download
 * @returns appropriate request options to make a specific http call during artifact download
 */
function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) {
function getDownloadRequestOptions(contentType, isKeepAlive, acceptGzip) {
    const requestOptions = {};
    if (contentType) {
        requestOptions['Content-Type'] = contentType;
@@ -7877,7 +7835,7 @@ function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) {
    }
    return requestOptions;
}
exports.getDownloadHeaders = getDownloadHeaders;
exports.getDownloadRequestOptions = getDownloadRequestOptions;
/**
 * Sets all the necessary headers when uploading an artifact
 * @param {string} contentType the type of content being uploaded
@@ -7886,9 +7844,9 @@ exports.getDownloadHeaders = getDownloadHeaders;
 * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
 * @param {number} contentLength the length of the content that is being uploaded
 * @param {string} contentRange the range of the content that is being uploaded
 * @returns appropriate headers to make a specific http call during artifact upload
 * @returns appropriate request options to make a specific http call during artifact upload
 */
function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) {
function getUploadRequestOptions(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) {
    const requestOptions = {};
    requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`;
    if (contentType) {
@@ -7911,9 +7869,9 @@ function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength,
    }
    return requestOptions;
}
exports.getUploadHeaders = getUploadHeaders;
exports.getUploadRequestOptions = getUploadRequestOptions;
function createHttpClient() {
    return new http_client_1.HttpClient('actions/artifact', [
    return new http_client_1.HttpClient('action/artifact', [
        new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken())
    ]);
}
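Besides the header-builder renames (getUploadRequestOptions/getDownloadRequestOptions become getUploadHeaders/getDownloadHeaders), the only textual difference in this group is the user agent string passed to HttpClient: 'actions/artifact' in v2.0.1 versus 'action/artifact' in v2-preview. A standalone sketch of the same client construction; reading the token from ACTIONS_RUNTIME_TOKEN is an assumption, since the real code goes through config_variables_1.getRuntimeToken().

```typescript
import {HttpClient} from '@actions/http-client';
import {BearerCredentialHandler} from '@actions/http-client/auth';

// assumption: ACTIONS_RUNTIME_TOKEN is how the runner exposes the token;
// the library wraps this lookup in its own config helper.
const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';

const client = new HttpClient('actions/artifact', [
  new BearerCredentialHandler(token)
]);
```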
@@ -8333,10 +8291,12 @@ function getProxyUrl(reqUrl) {
    }
    let proxyVar;
    if (usingSsl) {
        proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
        proxyVar = process.env["https_proxy"] ||
            process.env["HTTPS_PROXY"];
    }
    else {
        proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
        proxyVar = process.env["http_proxy"] ||
            process.env["HTTP_PROXY"];
    }
    if (proxyVar) {
        proxyUrl = url.parse(proxyVar);
@@ -8348,7 +8308,7 @@ function checkBypass(reqUrl) {
    if (!reqUrl.hostname) {
        return false;
    }
    let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || '';
    if (!noProxy) {
        return false;
    }
@@ -8369,10 +8329,7 @@ function checkBypass(reqUrl) {
        upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
    }
    // Compare request host against noproxy
    for (let upperNoProxyItem of noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)) {
    for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) {
        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
            return true;
        }
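The checkBypass logic above is unchanged apart from formatting: the request host (and host:port) is uppercased and compared against each comma-separated no_proxy entry. A self-contained re-implementation of that comparison, useful for reasoning about which hosts skip the proxy; note it is an exact match, with no wildcard or suffix handling.

```typescript
// Mirrors the comparison in checkBypass above: exact, case-insensitive
// matches against the comma-separated no_proxy list.
function bypassesProxy(hostname: string, port: string | undefined, noProxy: string): boolean {
  const upperReqHosts = [hostname.toUpperCase()];
  if (port) {
    upperReqHosts.push(`${upperReqHosts[0]}:${port}`);
  }
  for (const upperNoProxyItem of noProxy
    .split(',')
    .map(x => x.trim().toUpperCase())
    .filter(x => x)) {
    if (upperReqHosts.some(x => x === upperNoProxyItem)) {
      return true;
    }
  }
  return false;
}

console.log(bypassesProxy('pipelines.actions.githubusercontent.com', undefined, 'localhost,example.com')); // false
console.log(bypassesProxy('example.com', '443', 'localhost, EXAMPLE.COM')); // true
```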
package-lock.json (generated): 20 changed lines
@@ -1,13 +1,13 @@
{
  "name": "upload-artifact",
  "version": "2.0.1",
  "version": "2.0.0",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {
    "@actions/artifact": {
      "version": "0.3.2",
      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.3.2.tgz",
      "integrity": "sha512-KzUe5DEeVXprAodxfGKtx9f7ukuVKE6V6pge6t5GDGk0cdkfiMEfahoq7HfBsOsmVy4J7rr1YZQPUTvXveYinw==",
      "version": "0.3.1",
      "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.3.1.tgz",
      "integrity": "sha512-czRvOioOpuvmF/qDevfVVpZeBt7pjYlrnmM1+tRuCpKJxjWFYgi5MIW7TfscyupXPvtJz9jIxMjvxy9Eug1QEA==",
      "dev": true,
      "requires": {
        "@actions/core": "^1.2.1",
@@ -45,9 +45,9 @@
      }
    },
    "@actions/http-client": {
      "version": "1.0.8",
      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.8.tgz",
      "integrity": "sha512-G4JjJ6f9Hb3Zvejj+ewLLKLf99ZC+9v+yCxoYf9vSyH+WkzPLB2LuUtRMGNkooMqdugGBFStIKXOuvH1W+EctA==",
      "version": "1.0.7",
      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.7.tgz",
      "integrity": "sha512-PY3ys/XH5WMekkHyZhYSa/scYvlE5T/TV/T++vABHuY5ZRgtiBZkn2L2tV5Pv/xDCl59lSZb9WwRuWExDyAsSg==",
      "dev": true,
      "requires": {
        "tunnel": "0.0.6"
@@ -6520,9 +6520,9 @@
      }
    },
    "tmp-promise": {
      "version": "2.1.1",
      "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-2.1.1.tgz",
      "integrity": "sha512-Z048AOz/w9b6lCbJUpevIJpRpUztENl8zdv1bmAKVHimfqRFl92ROkmT9rp7TVBnrEw2gtMTol/2Cp2S2kJa4Q==",
      "version": "2.0.2",
      "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-2.0.2.tgz",
      "integrity": "sha512-zl71nFWjPKW2KXs+73gEk8RmqvtAeXPxhWDkTUoa3MSMkjq3I+9OeknjF178MQoMYsdqL730hfzvNfEkePxq9Q==",
      "dev": true,
      "requires": {
        "tmp": "0.1.0"
package.json
@@ -1,6 +1,6 @@
{
  "name": "upload-artifact",
  "version": "2.0.1",
  "version": "2.0.0",
  "description": "Upload a build artifact that can be used by subsequent workflow steps",
  "main": "dist/index.js",
  "scripts": {
@@ -29,7 +29,7 @@
  },
  "homepage": "https://github.com/actions/upload-artifact#readme",
  "devDependencies": {
    "@actions/artifact": "^0.3.2",
    "@actions/artifact": "^0.3.1",
    "@actions/core": "^1.2.3",
    "@actions/glob": "^0.1.0",
    "@actions/io": "^1.0.2",
@@ -21,24 +21,16 @@ async function run(): Promise<void> {

    const artifactClient = create()
    const options: UploadOptions = {
      continueOnError: false
      continueOnError: true
    }
    const uploadResponse = await artifactClient.uploadArtifact(
    await artifactClient.uploadArtifact(
      name || getDefaultArtifactName(),
      searchResult.filesToUpload,
      searchResult.rootDirectory,
      options
    )

    if (uploadResponse.failedItems.length > 0) {
      core.setFailed(
        `An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`
      )
    } else {
      core.info(
        `Artifact ${uploadResponse.artifactName} has been successfully uploaded!`
      )
    }
    core.info('Artifact upload has finished successfully!')
  }
} catch (err) {
  core.setFailed(err.message)