Mirror of https://github.com/actions/cache.git (synced 2025-06-24 19:31:10 +02:00)

Compare commits: pdotl-patc ... v3.3.3 (17 commits)
Commit SHA1s:
e12d46a63a
1baebfc3ba
eb94f1a6bf
704facf57e
17e2888746
667d8fdfa2
f7ebb81a3f
67b839edb6
57f0e3f198
04f198bf0b
bd9b49b6c3
ea0503788c
8f2671f18e
6f1f1e10f3
4b8460cbff
ef11f54eee
4b381be638
6  .github/workflows/issue-opened-workflow.yml  (vendored)

@@ -14,9 +14,3 @@ jobs:
       - name: add_assignees
         run: |
           curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.issue.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
-
-      - uses: actions/add-to-project@v0.4.0
-        name: Add to Project Board
-        with:
-          project-url: https://github.com/orgs/actions/projects/12
-          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}
6  .github/workflows/pr-opened-workflow.yml  (vendored)

@@ -18,9 +18,3 @@ jobs:
       - name: Add Assignee
         run: |
           curl -X POST -H "Accept: application/vnd.github+json" -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN}}" https://api.github.com/repos/${{github.repository}}/issues/${{ github.event.pull_request.number}}/assignees -d '{"assignees":["${{steps.oncall.outputs.CURRENT}}"]}'
-
-      - uses: actions/add-to-project@v0.4.0
-        name: Add to Project Board
-        with:
-          project-url: https://github.com/orgs/actions/projects/12
-          github-token: ${{ secrets.CACHE_BOARD_TOKEN }}
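Both workflows keep the curl step that assigns the on-call engineer through the Issues REST API. For readers who prefer a typed client, here is a minimal sketch of the same call with Octokit (assumes @octokit/rest is available; the owner/repo/issue values are illustrative stand-ins for the `${{ ... }}` expressions above):

```ts
import { Octokit } from "@octokit/rest";

// Sketch only: mirrors POST /repos/{owner}/{repo}/issues/{issue_number}/assignees.
const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });

async function addOncallAssignee(
    owner: string,        // e.g. from github.repository
    repo: string,
    issueNumber: number,  // github.event.issue.number or pull_request.number
    oncall: string        // steps.oncall.outputs.CURRENT
): Promise<void> {
    await octokit.rest.issues.addAssignees({
        owner,
        repo,
        issue_number: issueNumber,
        assignees: [oncall]
    });
}
```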
20  .licenses/npm/@actions/cache.dep.yml  (generated)

@@ -1,20 +1,20 @@
 ---
 name: "@actions/cache"
-version: 3.2.1
+version: 3.2.3
 type: npm
-summary:
-homepage:
+summary: Actions cache lib
+homepage: https://github.com/actions/toolkit/tree/main/packages/cache
 license: mit
 licenses:
 - sources: LICENSE.md
   text: |-
     The MIT License (MIT)

     Copyright 2019 GitHub

     Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

     The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 notices: []
2  .licenses/npm/@actions/http-client.dep.yml  (generated)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/http-client"
-version: 2.0.1
+version: 2.1.1
 type: npm
 summary: Actions Http Client
 homepage: https://github.com/actions/toolkit/tree/main/packages/http-client
2  .licenses/npm/@azure/core-http.dep.yml  (generated)

@@ -1,6 +1,6 @@
 ---
 name: "@azure/core-http"
-version: 3.0.0
+version: 3.0.4
 type: npm
 summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
   libraries generated using AutoRest
7  .licenses/npm/@azure/ms-rest-js.dep.yml  (generated)

@@ -1,9 +1,10 @@
 ---
 name: "@azure/ms-rest-js"
-version: 2.6.1
+version: 2.7.0
 type: npm
-summary:
-homepage:
+summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
+  libraries generated using AutoRest
+homepage: https://github.com/Azure/ms-rest-js
 license: mit
 licenses:
 - sources: LICENSE
34  .licenses/npm/ip-regex.dep.yml  (generated)

@@ -1,34 +0,0 @@
----
-name: ip-regex
-version: 2.1.0
-type: npm
-summary: Regular expression for matching IP addresses (IPv4 & IPv6)
-homepage: https://github.com/sindresorhus/ip-regex#readme
-license: mit
-licenses:
-- sources: license
-  text: |
-    The MIT License (MIT)
-
-    Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
-
-    Permission is hereby granted, free of charge, to any person obtaining a copy
-    of this software and associated documentation files (the "Software"), to deal
-    in the Software without restriction, including without limitation the rights
-    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-    copies of the Software, and to permit persons to whom the Software is
-    furnished to do so, subject to the following conditions:
-
-    The above copyright notice and this permission notice shall be included in
-    all copies or substantial portions of the Software.
-
-    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-    THE SOFTWARE.
-- sources: readme.md
-  text: MIT © [Sindre Sorhus](https://sindresorhus.com)
-notices: []
43  .licenses/npm/psl.dep.yml  (generated)

@@ -1,43 +0,0 @@
----
-name: psl
-version: 1.8.0
-type: npm
-summary: Domain name parser based on the Public Suffix List
-homepage: https://github.com/lupomontero/psl#readme
-license: mit
-licenses:
-- sources: LICENSE
-  text: |
-    The MIT License (MIT)
-
-    Copyright (c) 2017 Lupo Montero lupomontero@gmail.com
-
-    Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-    The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-- sources: README.md
-  text: |-
-    The MIT License (MIT)
-
-    Copyright (c) 2017 Lupo Montero <lupomontero@gmail.com>
-
-    Permission is hereby granted, free of charge, to any person obtaining a copy
-    of this software and associated documentation files (the "Software"), to deal
-    in the Software without restriction, including without limitation the rights
-    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-    copies of the Software, and to permit persons to whom the Software is
-    furnished to do so, subject to the following conditions:
-
-    The above copyright notice and this permission notice shall be included in
-    all copies or substantial portions of the Software.
-
-    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-    THE SOFTWARE.
-notices: []
34  .licenses/npm/punycode.dep.yml  (generated)

@@ -1,34 +0,0 @@
----
-name: punycode
-version: 2.1.1
-type: npm
-summary: A robust Punycode converter that fully complies to RFC 3492 and RFC 5891,
-  and works on nearly all JavaScript platforms.
-homepage: https://mths.be/punycode
-license: mit
-licenses:
-- sources: LICENSE-MIT.txt
-  text: |
-    Copyright Mathias Bynens <https://mathiasbynens.be/>
-
-    Permission is hereby granted, free of charge, to any person obtaining
-    a copy of this software and associated documentation files (the
-    "Software"), to deal in the Software without restriction, including
-    without limitation the rights to use, copy, modify, merge, publish,
-    distribute, sublicense, and/or sell copies of the Software, and to
-    permit persons to whom the Software is furnished to do so, subject to
-    the following conditions:
-
-    The above copyright notice and this permission notice shall be
-    included in all copies or substantial portions of the Software.
-
-    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-    NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-    LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-- sources: README.md
-  text: Punycode.js is available under the [MIT](https://mths.be/mit) license.
-notices: []
4  .licenses/npm/semver.dep.yml  (generated)

@@ -1,9 +1,9 @@
 ---
 name: semver
-version: 6.3.0
+version: 6.3.1
 type: npm
 summary: The semantic version parser used by npm.
-homepage: https://github.com/npm/node-semver#readme
+homepage:
 license: isc
 licenses:
 - sources: LICENSE
23  .licenses/npm/tough-cookie.dep.yml  (generated)

@@ -1,23 +0,0 @@
----
-name: tough-cookie
-version: 3.0.1
-type: npm
-summary: RFC6265 Cookies and Cookie Jar for node.js
-homepage: https://github.com/salesforce/tough-cookie
-license: bsd-3-clause
-licenses:
-- sources: LICENSE
-  text: |
-    Copyright (c) 2015, Salesforce.com, Inc.
-    All rights reserved.
-
-    Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
-
-    1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
-
-    2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
-
-    3. Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
-
-    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-notices: []
2  .licenses/npm/xml2js.dep.yml  (generated)

@@ -1,6 +1,6 @@
 ---
 name: xml2js
-version: 0.4.23
+version: 0.5.0
 type: npm
 summary: Simple XML to JavaScript object converter.
 homepage: https://github.com/Leonidas-from-XIV/node-xml2js
@@ -107,3 +107,12 @@
 ### 3.3.1

 - Reduced segment size to 128MB and segment timeout to 10 minutes to fail fast in case the cache download is stuck.
+
+### 3.3.2
+
+- Fixes bug with Azure SDK causing blob downloads to get stuck.
+
+### 3.3.3
+
+- Updates @actions/cache to v3.2.3 to fix accidental mutated path arguments to `getCacheVersion` [actions/toolkit#1378](https://github.com/actions/toolkit/pull/1378)
+- Additional audit fixes of npm package(s)
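The 3.3.3 bullet describes an aliasing bug: `getCacheVersion` appended version components onto the very array the caller passed in. A minimal sketch of the pitfall and the defensive-copy fix visible later in this diff (names simplified; this is not the toolkit source):

```ts
import * as crypto from "crypto";

// Buggy shape: `components` aliases the caller's array, so the
// compression method and salt leak into the caller's `paths`.
function cacheVersionMutating(paths: string[], extra: string[]): string {
    const components = paths;          // alias, not a copy
    components.push(...extra);         // mutates the caller's array!
    return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}

// Fixed shape: copy first, matching `paths.slice()` in the patched code.
function cacheVersion(paths: string[], extra: string[]): string {
    const components = paths.slice();  // don't pass changes upstream
    components.push(...extra);
    return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}
```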
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, RefKey } from "../src/constants";
-import run from "../src/restore";
+import { restoreRun } from "../src/restoreImpl";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

@@ -71,7 +71,7 @@ test("restore with no cache found", async () => {
         return Promise.resolve(undefined);
     });

-    await run();
+    await restoreRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
         return Promise.resolve(undefined);
     });

-    await run();
+    await restoreRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -156,7 +156,7 @@ test("restore with cache found for key", async () => {
         return Promise.resolve(key);
     });

-    await run();
+    await restoreRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -201,7 +201,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(restoreKey);
     });

-    await run();
+    await restoreRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -246,7 +246,7 @@ test("Fail restore when fail on cache miss is enabled and primary + restore keys
         return Promise.resolve(undefined);
     });

-    await run();
+    await restoreRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -289,7 +289,7 @@ test("restore when fail on cache miss is enabled and primary key doesn't match r
         return Promise.resolve(restoreKey);
     });

-    await run();
+    await restoreRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -335,7 +335,7 @@ test("restore with fail on cache miss disabled and no cache found", async () =>
         return Promise.resolve(undefined);
     });

-    await run();
+    await restoreRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, Inputs, RefKey } from "../src/constants";
-import run from "../src/restoreImpl";
+import { restoreImpl } from "../src/restoreImpl";
 import { StateProvider } from "../src/stateProvider";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";
@@ -60,7 +60,7 @@ test("restore with invalid event outputs warning", async () => {
     const invalidEvent = "commit_comment";
     process.env[Events.Key] = invalidEvent;
     delete process.env[RefKey];
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(logWarningMock).toHaveBeenCalledWith(
         `Event Validation Error: The event type ${invalidEvent} is not supported because it's not tied to a branch or tag ref.`
     );
@@ -76,7 +76,7 @@ test("restore without AC available should no-op", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -92,7 +92,7 @@ test("restore on GHES without AC available should no-op", async () => {
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
     const setCacheHitOutputMock = jest.spyOn(core, "setOutput");

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
@@ -119,7 +119,7 @@ test("restore on GHES with AC available ", async () => {
         return Promise.resolve(key);
     });

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -143,7 +143,7 @@ test("restore on GHES with AC available ", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     // this input isn't necessary for restore b/c tarball contains entries relative to workspace
     expect(failedMock).not.toHaveBeenCalledWith(
@@ -155,7 +155,7 @@ test("restore with no key", async () => {
     testUtils.setInput(Inputs.Path, "node_modules");
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledWith(
         "Input required and not supplied: key"
@@ -174,7 +174,7 @@ test("restore with too many keys should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -200,7 +200,7 @@ test("restore with large key should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -226,7 +226,7 @@ test("restore with invalid key should fail", async () => {
     });
     const failedMock = jest.spyOn(core, "setFailed");
     const restoreCacheMock = jest.spyOn(cache, "restoreCache");
-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());
     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
         [path],
@@ -260,7 +260,7 @@ test("restore with no cache found", async () => {
         return Promise.resolve(undefined);
     });

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -301,7 +301,7 @@ test("restore with restore keys and no cache found", async () => {
         return Promise.resolve(undefined);
     });

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -341,7 +341,7 @@ test("restore with cache found for key", async () => {
         return Promise.resolve(key);
     });

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -383,7 +383,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(restoreKey);
     });

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -424,7 +424,7 @@ test("restore with lookup-only set", async () => {
         return Promise.resolve(key);
     });

-    await run(new StateProvider());
+    await restoreImpl(new StateProvider());

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -2,7 +2,7 @@ import * as cache from "@actions/cache";
 import * as core from "@actions/core";

 import { Events, RefKey } from "../src/constants";
-import run from "../src/restoreOnly";
+import { restoreOnlyRun } from "../src/restoreImpl";
 import * as actionUtils from "../src/utils/actionUtils";
 import * as testUtils from "../src/utils/testUtils";

@@ -72,7 +72,7 @@ test("restore with no cache found", async () => {
         return Promise.resolve(undefined);
     });

-    await run();
+    await restoreOnlyRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -114,7 +114,7 @@ test("restore with restore keys and no cache found", async () => {
         return Promise.resolve(undefined);
     });

-    await run();
+    await restoreOnlyRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -153,7 +153,7 @@ test("restore with cache found for key", async () => {
         return Promise.resolve(key);
     });

-    await run();
+    await restoreOnlyRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
@@ -196,7 +196,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(restoreKey);
     });

-    await run();
+    await restoreOnlyRun();

     expect(restoreCacheMock).toHaveBeenCalledTimes(1);
     expect(restoreCacheMock).toHaveBeenCalledWith(
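Across all three test suites the change is mechanical: the default-exported `run` from each entry module is replaced with named entry points exported from restoreImpl (`restoreRun`, `restoreImpl`, `restoreOnlyRun`). A self-contained sketch of the pattern (types simplified; the real module also threads an earlyExit flag, visible in the dist diff below):

```ts
interface StateLike {
    setState(key: string, value: string): void;
}

// Persists restore state for the later save step (sketch).
class StateProvider implements StateLike {
    setState(key: string, value: string): void {
        console.log(`saving state ${key}=${value}`);
    }
}

// restore-only has no save step, so state writes are no-ops.
class NullStateProvider implements StateLike {
    setState(_key: string, _value: string): void {}
}

// One shared implementation, parameterized by the state provider...
async function restoreImpl(state: StateLike): Promise<void> {
    state.setState("CACHE_KEY", "example-key");
    // ...shared restore logic...
}

// ...and thin named entry points, as the updated tests import them.
export const restoreRun = () => restoreImpl(new StateProvider());
export const restoreOnlyRun = () => restoreImpl(new NullStateProvider());
```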
403  dist/restore-only/index.js  (vendored)

@@ -1127,17 +1127,19 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var e_1, _a;
-    var _b;
+    var _a, e_1, _b, _c;
+    var _d;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-                const file = _d.value;
+            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
+                _c = _g.value;
+                _e = false;
+                const file = _c;
                 const relativeFile = path
                     .relative(workspace, file)
                     .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1155,7 +1157,7 @@ function resolvePaths(patterns) {
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -3382,7 +3384,8 @@ function createHttpClient() {
     return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
 }
 function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
-    const components = paths;
+    // don't pass changes upstream
+    const components = paths.slice();
     // Add compression method to cache version to restore
     // compressed cache as per compression method
     if (compressionMethod) {
@@ -3394,10 +3397,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
-    return crypto
-        .createHash('sha256')
-        .update(components.join('|'))
-        .digest('hex');
+    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
@@ -3450,13 +3450,21 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (downloadOptions.useAzureSdk &&
-            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
-            // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
-            yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+            if (downloadOptions.useAzureSdk) {
+                // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
+                yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+            }
+            else if (downloadOptions.concurrentBlobDownloads) {
+                // Use concurrent implementation with HttpClient to work around blob SDK issue
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
+            }
+            else {
+                // Otherwise, download using the Actions http-client.
+                yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+            }
         }
         else {
             // Otherwise, download using the Actions http-client.
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
     });
@@ -3489,9 +3497,7 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
@@ -4866,7 +4872,13 @@ function getProxyUrl(reqUrl) {
     }
     })();
     if (proxyVar) {
-        return new URL(proxyVar);
+        try {
+            return new URL(proxyVar);
+        }
+        catch (_a) {
+            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+                return new URL(`http://${proxyVar}`);
+        }
     }
     else {
         return undefined;
@@ -4877,6 +4889,10 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
+    const reqHost = reqUrl.hostname;
+    if (isLoopbackAddress(reqHost)) {
+        return true;
+    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -4902,13 +4918,24 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+        if (upperNoProxyItem === '*' ||
+            upperReqHosts.some(x => x === upperNoProxyItem ||
+                x.endsWith(`.${upperNoProxyItem}`) ||
+                (upperNoProxyItem.startsWith('.') &&
+                    x.endsWith(`${upperNoProxyItem}`)))) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+    const hostLower = host.toLowerCase();
+    return (hostLower === 'localhost' ||
+        hostLower.startsWith('127.') ||
+        hostLower.startsWith('[::1]') ||
+        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map

 /***/ }),
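The checkBypass change does two things: loopback hosts now always bypass the proxy, and NO_PROXY entries match subdomains (plus a leading-dot form and a `*` wildcard) instead of exact hosts only. A standalone illustration of the new matching rule (simplified: the real code also checks host:port combinations):

```ts
// Standalone re-implementation of the updated no_proxy matching, for clarity.
function bypassesProxy(host: string, noProxy: string): boolean {
    const upperHost = host.toUpperCase();
    for (const item of noProxy.split(",").map(x => x.trim().toUpperCase()).filter(x => x)) {
        if (item === "*" ||
            upperHost === item ||
            upperHost.endsWith(`.${item}`) ||
            (item.startsWith(".") && upperHost.endsWith(item))) {
            return true;
        }
    }
    return false;
}

console.log(bypassesProxy("api.github.com", "github.com")); // true: subdomain now matches
console.log(bypassesProxy("github.com", ".github.com"));    // false: leading dot needs a subdomain
console.log(bypassesProxy("anything.example", "*"));        // true: wildcard bypasses everything
```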
@@ -5557,7 +5584,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);
@@ -5714,6 +5741,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
 /**
  * Download the cache using the Azure Storage SDK. Only call this method if the
  * URL points to an Azure Storage endpoint.
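downloadCacheHttpClientConcurrent fetches the archive in 4 MiB Range requests and keeps at most downloadConcurrency (default 10) requests in flight, writing each segment at its own file offset as it finishes. The scheduling core, distilled into a generic self-contained sketch (not the vendored code):

```ts
// Generic bounded-concurrency runner in the same style as the loop above:
// start tasks until the window is full, then await whichever finishes first.
async function runBounded<T>(
    tasks: Array<() => Promise<T>>,
    limit: number,
    onDone: (result: T) => Promise<void>
): Promise<void> {
    const queue = tasks.slice().reverse();            // reverse so pop() takes them in order
    const active = new Map<number, Promise<[number, T]>>();
    let id = 0;
    const drainOne = async () => {
        // Promise.race picks the fastest in-flight task; the tag says which one.
        const [doneId, value] = await Promise.race(active.values());
        active.delete(doneId);
        await onDone(value);                          // e.g. write segment at its offset
    };
    let next: (() => Promise<T>) | undefined;
    while ((next = queue.pop())) {
        const myId = id++;
        active.set(myId, next().then(v => [myId, v] as [number, T]));
        if (active.size >= limit) await drainOne();
    }
    while (active.size > 0) await drainOne();
}
```

Writing segments as they complete (rather than in order) works because each segment carries its own offset, so the file can be assembled out of order with positional writes.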
@@ -7136,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
 // Max safe segment length for coercion.
 var MAX_SAFE_COMPONENT_LENGTH = 16

+var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
+
 // The actual regexps go on exports.re
 var re = exports.re = []
+var safeRe = exports.safeRe = []
 var src = exports.src = []
 var t = exports.tokens = {}
 var R = 0
@@ -7146,6 +7285,31 @@ function tok (n) {
   t[n] = R++
 }

+var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
+
+// Replace some greedy regex tokens to prevent regex dos issues. These regex are
+// used internally via the safeRe object since all inputs in this library get
+// normalized first to trim and collapse all extra whitespace. The original
+// regexes are exported for userland consumption and lower level usage. A
+// future breaking change could export the safer regex only with a note that
+// all input should have extra whitespace removed.
+var safeRegexReplacements = [
+  ['\\s', 1],
+  ['\\d', MAX_LENGTH],
+  [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
+]
+
+function makeSafeRe (value) {
+  for (var i = 0; i < safeRegexReplacements.length; i++) {
+    var token = safeRegexReplacements[i][0]
+    var max = safeRegexReplacements[i][1]
+    value = value
+      .split(token + '*').join(token + '{0,' + max + '}')
+      .split(token + '+').join(token + '{1,' + max + '}')
+  }
+  return value
+}
+
 // The following Regular Expressions can be used for tokenizing,
 // validating, and parsing SemVer version strings.

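The semver 6.3.1 changes mitigate regular-expression denial of service (tracked upstream as CVE-2022-25883) by giving every unbounded quantifier a hard upper bound: makeSafeRe rewrites `*` and `+` on the whitespace, digit, and letter-dash-number tokens into bounded repetitions. A quick demonstration of the rewrite on the TILDETRIM source pattern (in semver's source MAX_LENGTH is 256, so `\d` would be capped at 256; `\s` is capped at 1):

```ts
// Mirrors makeSafeRe's string rewrite: token* -> token{0,max}, token+ -> token{1,max}.
function makeSafe(value: string, token: string, max: number): string {
    return value
        .split(token + "*").join(token + "{0," + max + "}")
        .split(token + "+").join(token + "{1," + max + "}");
}

// TILDETRIM's source pattern from the diff above: (\s*)(?:~>?)\s+
let pattern = "(\\s*)(?:~>?)\\s+";
pattern = makeSafe(pattern, "\\s", 1);
console.log(pattern);                       // (\s{0,1})(?:~>?)\s{1,1}
console.log(new RegExp(pattern).test("~> ")); // true: normal input still matches
```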
@@ -7155,14 +7319,14 @@ function tok (n) {
 tok('NUMERICIDENTIFIER')
 src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
 tok('NUMERICIDENTIFIERLOOSE')
-src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
+src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'

 // ## Non-numeric Identifier
 // Zero or more digits, followed by a letter or hyphen, and then zero or
 // more letters, digits, or hyphens.

 tok('NONNUMERICIDENTIFIER')
-src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
+src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'

 // ## Main Version
 // Three dot-separated numeric identifiers.
@@ -7204,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
 // Any combination of digits, letters, or hyphens.

 tok('BUILDIDENTIFIER')
-src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
+src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'

 // ## Build Metadata
 // Plus sign, followed by one or more period-separated build metadata
@@ -7284,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
   '(?:$|[^\\d])'
 tok('COERCERTL')
 re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
+safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

 // Tilde ranges.
 // Meaning is "reasonably at or greater than"
@@ -7293,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
 tok('TILDETRIM')
 src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
 re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
+safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
 var tildeTrimReplace = '$1~'

 tok('TILDE')
@@ -7308,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
 tok('CARETTRIM')
 src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
 re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
+safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
 var caretTrimReplace = '$1^'

 tok('CARET')
@@ -7329,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

 // this one has to use the /g flag
 re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
+safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
 var comparatorTrimReplace = '$1$2$3'

 // Something like `1.2.3 - 1.2.4`
@@ -7357,6 +7525,14 @@ for (var i = 0; i < R; i++) {
   debug(i, src[i])
   if (!re[i]) {
     re[i] = new RegExp(src[i])
+
+    // Replace all greedy whitespace to prevent regex dos issues. These regex are
+    // used internally via the safeRe object since all inputs in this library get
+    // normalized first to trim and collapse all extra whitespace. The original
+    // regexes are exported for userland consumption and lower level usage. A
+    // future breaking change could export the safer regex only with a note that
+    // all input should have extra whitespace removed.
+    safeRe[i] = new RegExp(makeSafeRe(src[i]))
   }
 }

@@ -7381,7 +7557,7 @@ function parse (version, options) {
     return null
   }

-  var r = options.loose ? re[t.LOOSE] : re[t.FULL]
+  var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
   if (!r.test(version)) {
     return null
   }
@@ -7436,7 +7612,7 @@ function SemVer (version, options) {
   this.options = options
   this.loose = !!options.loose

-  var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
+  var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])

   if (!m) {
     throw new TypeError('Invalid Version: ' + version)
@@ -7881,6 +8057,7 @@ function Comparator (comp, options) {
     return new Comparator(comp, options)
   }

+  comp = comp.trim().split(/\s+/).join(' ')
   debug('comparator', comp, options)
   this.options = options
   this.loose = !!options.loose
@@ -7897,7 +8074,7 @@ function Comparator (comp, options) {

 var ANY = {}
 Comparator.prototype.parse = function (comp) {
-  var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
+  var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
   var m = comp.match(r)

   if (!m) {
@@ -8021,9 +8198,16 @@ function Range (range, options) {
   this.loose = !!options.loose
   this.includePrerelease = !!options.includePrerelease

-  // First, split based on boolean or ||
+  // First reduce all whitespace as much as possible so we do not have to rely
+  // on potentially slow regexes like \s*. This is then stored and used for
+  // future error messages as well.
   this.raw = range
-  this.set = range.split(/\s*\|\|\s*/).map(function (range) {
+    .trim()
+    .split(/\s+/)
+    .join(' ')
+
+  // First, split based on boolean or ||
+  this.set = this.raw.split('||').map(function (range) {
     return this.parseRange(range.trim())
   }, this).filter(function (c) {
     // throw out any that are not relevant for whatever reason
@@ -8031,7 +8215,7 @@ function Range (range, options) {
   })

   if (!this.set.length) {
-    throw new TypeError('Invalid SemVer Range: ' + range)
+    throw new TypeError('Invalid SemVer Range: ' + this.raw)
   }

   this.format()
@@ -8050,20 +8234,19 @@ Range.prototype.toString = function () {

 Range.prototype.parseRange = function (range) {
   var loose = this.options.loose
-  range = range.trim()
   // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
-  var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
+  var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
   range = range.replace(hr, hyphenReplace)
   debug('hyphen replace', range)
   // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
-  range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
-  debug('comparator trim', range, re[t.COMPARATORTRIM])
+  range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
+  debug('comparator trim', range, safeRe[t.COMPARATORTRIM])

   // `~ 1.2.3` => `~1.2.3`
-  range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
+  range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)

   // `^ 1.2.3` => `^1.2.3`
-  range = range.replace(re[t.CARETTRIM], caretTrimReplace)
+  range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)

   // normalize spaces
   range = range.split(/\s+/).join(' ')
@@ -8071,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
   // At this point, the range is completely trimmed and
   // ready to be split into comparators.

-  var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
+  var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
   var set = range.split(' ').map(function (comp) {
     return parseComparator(comp, this.options)
   }, this).join(' ').split(/\s+/)
@@ -8171,7 +8354,7 @@ function replaceTildes (comp, options) {
 }

 function replaceTilde (comp, options) {
-  var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
+  var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
   return comp.replace(r, function (_, M, m, p, pr) {
     debug('tilde', comp, _, M, m, p, pr)
     var ret
@@ -8212,7 +8395,7 @@ function replaceCarets (comp, options) {

 function replaceCaret (comp, options) {
   debug('caret', comp, options)
-  var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
+  var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
   return comp.replace(r, function (_, M, m, p, pr) {
     debug('caret', comp, _, M, m, p, pr)
     var ret
@@ -8271,7 +8454,7 @@ function replaceXRanges (comp, options) {

 function replaceXRange (comp, options) {
   comp = comp.trim()
-  var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
+  var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
   return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
     debug('xRange', comp, ret, gtlt, M, m, p, pr)
     var xM = isX(M)
@@ -8346,7 +8529,7 @@ function replaceXRange (comp, options) {
 function replaceStars (comp, options) {
   debug('replaceStars', comp, options)
   // Looseness is ignored here. star is always as loose as it gets!
-  return comp.trim().replace(re[t.STAR], '')
+  return comp.trim().replace(safeRe[t.STAR], '')
 }

 // This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8672,7 +8855,7 @@ function coerce (version, options) {

   var match = null
   if (!options.rtl) {
-    match = version.match(re[t.COERCE])
+    match = version.match(safeRe[t.COERCE])
   } else {
     // Find the right-most coercible string that does not share
     // a terminus with a more left-ward coercible string.
@@ -8683,17 +8866,17 @@ function coerce (version, options) {
     // Stop when we get a match that ends at the string end, since no
     // coercible string can be more right-ward without the same terminus.
     var next
-    while ((next = re[t.COERCERTL].exec(version)) &&
+    while ((next = safeRe[t.COERCERTL].exec(version)) &&
       (!match || match.index + match[0].length !== version.length)
     ) {
       if (!match ||
         next.index + next[0].length !== match.index + match[0].length) {
         match = next
       }
-      re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
+      safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
     }
     // leave it in a clean state
-    re[t.COERCERTL].lastIndex = -1
+    safeRe[t.COERCERTL].lastIndex = -1
   }

   if (match === null) {
@@ -35837,6 +36020,19 @@ class HttpClientResponse {
             }));
         });
     }
+    readBodyBuffer() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                const chunks = [];
+                this.message.on('data', (chunk) => {
+                    chunks.push(chunk);
+                });
+                this.message.on('end', () => {
+                    resolve(Buffer.concat(chunks));
+                });
+            }));
+        });
+    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
@@ -36901,28 +37097,9 @@ exports.default = {

 "use strict";

-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
-const restoreImpl_1 = __importDefault(__webpack_require__(835));
-const stateProvider_1 = __webpack_require__(309);
-function run() {
-    return __awaiter(this, void 0, void 0, function* () {
-        yield (0, restoreImpl_1.default)(new stateProvider_1.NullStateProvider());
-    });
-}
-run();
-exports.default = run;
+const restoreImpl_1 = __webpack_require__(835);
+(0, restoreImpl_1.restoreOnlyRun)(true);


 /***/ }),
@@ -40310,7 +40487,8 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: true,
+        useAzureSdk: false,
+        concurrentBlobDownloads: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -40320,6 +40498,9 @@ function getDownloadOptions(copy) {
     if (typeof copy.useAzureSdk === 'boolean') {
         result.useAzureSdk = copy.useAzureSdk;
     }
+    if (typeof copy.concurrentBlobDownloads === 'boolean') {
+        result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
+    }
     if (typeof copy.downloadConcurrency === 'number') {
         result.downloadConcurrency = copy.downloadConcurrency;
     }
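With the new defaults (useAzureSdk: false, concurrentBlobDownloads: true), Azure-hosted cache blobs are fetched via the concurrent http-client path unless a caller opts back into the SDK. A usage sketch; the option names come straight from getDownloadOptions above, but treat the exact call shape of @actions/cache's restoreCache as illustrative:

```ts
import * as cache from "@actions/cache";

async function restoreWithSdk(): Promise<string | undefined> {
    // Paths and key are illustrative placeholders.
    return cache.restoreCache(
        ["node_modules"],
        "npm-example-key",
        [],
        {
            useAzureSdk: true,            // opt back into Azure SDK downloads
            concurrentBlobDownloads: false,
            downloadConcurrency: 8,
            timeoutInMs: 30000,
            segmentTimeoutInMs: 600000
        }
    );
}
```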
@@ -49096,9 +49277,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
 const cache = __importStar(__webpack_require__(692));
 const core = __importStar(__webpack_require__(470));
 const constants_1 = __webpack_require__(694);
+const stateProvider_1 = __webpack_require__(309);
 const utils = __importStar(__webpack_require__(360));
 function restoreImpl(stateProvider) {
     return __awaiter(this, void 0, void 0, function* () {
@@ -49149,7 +49332,40 @@ function restoreImpl(stateProvider) {
         }
     });
 }
-exports.default = restoreImpl;
+exports.restoreImpl = restoreImpl;
+function run(stateProvider, earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        try {
+            yield restoreImpl(stateProvider);
+        }
+        catch (err) {
+            console.error(err);
+            if (earlyExit) {
+                process.exit(1);
+            }
+        }
+        // node will stay alive if any promises are not resolved,
+        // which is a possibility if HTTP requests are dangling
+        // due to retries or timeouts. We know that if we got here
+        // that all promises that we care about have successfully
+        // resolved, so simply exit with success.
+        if (earlyExit) {
+            process.exit(0);
+        }
+    });
+}
+function restoreOnlyRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.NullStateProvider(), earlyExit);
+    });
+}
+exports.restoreOnlyRun = restoreOnlyRun;
+function restoreRun(earlyExit) {
+    return __awaiter(this, void 0, void 0, function* () {
+        yield run(new stateProvider_1.StateProvider(), earlyExit);
+    });
+}
+exports.restoreRun = restoreRun;


 /***/ }),
@@ -50089,14 +50305,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
         this.saxParser.onopentag = (function(_this) {
           return function(node) {
             var key, newValue, obj, processedKey, ref;
-            obj = {};
+            obj = Object.create(null);
             obj[charkey] = "";
             if (!_this.options.ignoreAttrs) {
               ref = node.attributes;
               for (key in ref) {
                 if (!hasProp.call(ref, key)) continue;
                 if (!(attrkey in obj) && !_this.options.mergeAttrs) {
-                  obj[attrkey] = {};
+                  obj[attrkey] = Object.create(null);
                 }
                 newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
                 processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50146,7 +50362,11 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
           }
         }
         if (isEmpty(obj)) {
-          obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
+          if (typeof _this.options.emptyTag === 'function') {
+            obj = _this.options.emptyTag();
+          } else {
+            obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
+          }
         }
         if (_this.options.validator != null) {
           xpath = "/" + ((function() {
@@ -50170,7 +50390,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
           }
           if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
             if (!_this.options.preserveChildrenOrder) {
-              node = {};
+              node = Object.create(null);
               if (_this.options.attrkey in obj) {
                 node[_this.options.attrkey] = obj[_this.options.attrkey];
                 delete obj[_this.options.attrkey];
@@ -50185,7 +50405,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
               obj = node;
             } else if (s) {
               s[_this.options.childkey] = s[_this.options.childkey] || [];
-              objClone = {};
+              objClone = Object.create(null);
               for (key in obj) {
                 if (!hasProp.call(obj, key)) continue;
                 objClone[key] = obj[key];
@@ -50202,7 +50422,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
         } else {
           if (_this.options.explicitRoot) {
             old = obj;
-            obj = {};
+            obj = Object.create(null);
             obj[nodeName] = old;
           }
           _this.resultObject = obj;
@ -52404,7 +52624,7 @@ var CacheFilename;
|
||||
(function (CacheFilename) {
|
||||
CacheFilename["Gzip"] = "cache.tgz";
|
||||
CacheFilename["Zstd"] = "cache.tzst";
|
||||
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
|
||||
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
|
||||
var CompressionMethod;
|
||||
(function (CompressionMethod) {
|
||||
CompressionMethod["Gzip"] = "gzip";
|
||||
@ -52412,12 +52632,12 @@ var CompressionMethod;
|
||||
// This enum is for earlier version of zstd that does not have --long support
|
||||
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
|
||||
CompressionMethod["Zstd"] = "zstd";
|
||||
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
|
||||
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
|
||||
var ArchiveToolType;
|
||||
(function (ArchiveToolType) {
|
||||
ArchiveToolType["GNU"] = "gnu";
|
||||
ArchiveToolType["BSD"] = "bsd";
|
||||
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
|
||||
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
|
||||
// The default number of retry attempts.
|
||||
exports.DefaultRetryAttempts = 2;
|
||||
// The default delay in milliseconds between retry attempts.
|
||||
@ -54815,7 +55035,7 @@ class HttpHeaders {
|
||||
set(headerName, headerValue) {
|
||||
this._headersMap[getHeaderKey(headerName)] = {
|
||||
name: headerName,
|
||||
value: headerValue.toString(),
|
||||
value: headerValue.toString().trim(),
|
||||
};
|
||||
}
|
||||
/**
|
||||
@ -54955,7 +55175,7 @@ const Constants = {
|
||||
/**
|
||||
* The core-http version
|
||||
*/
|
||||
coreHttpVersion: "3.0.0",
|
||||
coreHttpVersion: "3.0.4",
|
||||
/**
|
||||
* Specifies HTTP.
|
||||
*/
|
||||
@ -55033,13 +55253,6 @@ const XML_CHARKEY = "_";
|
||||
|
||||
// Copyright (c) Microsoft Corporation.
|
||||
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
|
||||
/**
|
||||
* A constant that indicates whether the environment is node.js or browser based.
|
||||
*/
|
||||
const isNode = typeof process !== "undefined" &&
|
||||
!!process.version &&
|
||||
!!process.versions &&
|
||||
!!process.versions.node;
|
||||
/**
|
||||
* Encodes an URI.
|
||||
*
|
||||
@ -59720,7 +59933,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
|
||||
factories.push(throttlingRetryPolicy());
|
||||
}
|
||||
factories.push(deserializationPolicy(options.deserializationContentTypes));
|
||||
if (isNode) {
|
||||
if (coreUtil.isNode) {
|
||||
factories.push(proxyPolicy(options.proxySettings));
|
||||
}
|
||||
factories.push(logPolicy({ logger: logger.info }));
|
||||
@ -59752,7 +59965,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
|
||||
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
|
||||
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
|
||||
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
|
||||
if (isNode) {
|
||||
if (coreUtil.isNode) {
|
||||
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
|
||||
}
|
||||
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
|
||||
@ -59765,7 +59978,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
|
||||
requestPolicyFactories.push(authPolicyFactory);
|
||||
}
|
||||
requestPolicyFactories.push(logPolicy(loggingOptions));
|
||||
if (isNode && pipelineOptions.decompressResponse === false) {
|
||||
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
|
||||
requestPolicyFactories.push(disableResponseDecompressionPolicy());
|
||||
}
|
||||
return {
|
||||
@ -59896,10 +60109,7 @@ function flattenResponse(_response, responseSpec) {
|
||||
}
|
||||
function getCredentialScopes(options, baseUri) {
|
||||
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
|
||||
const scopes = options.credentialScopes;
|
||||
return Array.isArray(scopes)
|
||||
? scopes.map((scope) => new URL(scope).toString())
|
||||
: new URL(scopes).toString();
|
||||
return options.credentialScopes;
|
||||
}
|
||||
if (baseUri) {
|
||||
return `${baseUri}/.default`;
|
||||
@ -60132,6 +60342,10 @@ Object.defineProperty(exports, 'delay', {
|
||||
enumerable: true,
|
||||
get: function () { return coreUtil.delay; }
|
||||
});
|
||||
Object.defineProperty(exports, 'isNode', {
|
||||
enumerable: true,
|
||||
get: function () { return coreUtil.isNode; }
|
||||
});
|
||||
Object.defineProperty(exports, 'isTokenCredential', {
|
||||
enumerable: true,
|
||||
get: function () { return coreAuth.isTokenCredential; }
|
||||
@ -60171,7 +60385,6 @@ exports.generateUuid = generateUuid;
|
||||
exports.getDefaultProxySettings = getDefaultProxySettings;
|
||||
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
|
||||
exports.isDuration = isDuration;
|
||||
exports.isNode = isNode;
|
||||
exports.isValidUuid = isValidUuid;
|
||||
exports.keepAlivePolicy = keepAlivePolicy;
|
||||
exports.logPolicy = logPolicy;
|
||||
|
403
dist/restore/index.js
vendored
@ -1127,17 +1127,19 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var e_1, _a;
var _b;
var _a, e_1, _b, _c;
var _d;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
const file = _c;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@ -1155,7 +1157,7 @@ function resolvePaths(patterns) {
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
}
finally { if (e_1) throw e_1.error; }
}
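The resolvePaths hunks above only reflect a newer TypeScript __asyncValues helper; the behavior is unchanged: glob the requested patterns and record each match relative to GITHUB_WORKSPACE with forward slashes, so the same paths hash identically on every OS. A rough standalone equivalent of that normalization (helper name hypothetical):

const path = require('path');

// Convert an absolute glob match into a workspace-relative, POSIX-style path.
function toWorkspaceRelative(file, workspace = process.env['GITHUB_WORKSPACE'] || process.cwd()) {
    return path
        .relative(workspace, file)
        .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
}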
@ -3382,7 +3384,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths;
// don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@ -3394,10 +3397,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex');
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
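getCacheVersion above hashes the cached paths plus the compression method and a version salt with SHA-256; copying the array first (paths.slice()) keeps the later pushes from mutating the caller's input. A condensed sketch of the derivation (the lines hidden by this hunk also factor in enableCrossOsArchive; the salt value here is illustrative):

const crypto = require('crypto');

function cacheVersion(paths, compressionMethod, versionSalt = '1.0') {
    const components = paths.slice(); // don't pass changes upstream
    if (compressionMethod) {
        components.push(compressionMethod);
    }
    components.push(versionSalt); // bump to invalidate entries on breaking changes
    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}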
function getCacheEntry(keys, paths, options) {
@ -3450,13 +3450,21 @@ function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (downloadOptions.useAzureSdk &&
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
});
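The reworked downloadCache above first tests whether the archive lives on Azure Blob Storage and then picks a strategy: the Azure SDK when useAzureSdk is set, otherwise the new concurrent http-client path, falling back to the plain http-client. The selection reduces to this shape (sketch; function names as in the hunk):

async function pickDownload(archiveLocation, archivePath, opts) {
    const isAzureBlob = new URL(archiveLocation).hostname.endsWith('.blob.core.windows.net');
    if (isAzureBlob && opts.useAzureSdk) {
        return downloadCacheStorageSDK(archiveLocation, archivePath, opts);
    }
    if (isAzureBlob && opts.concurrentBlobDownloads) {
        return downloadCacheHttpClientConcurrent(archiveLocation, archivePath, opts);
    }
    return downloadCacheHttpClient(archiveLocation, archivePath);
}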
@ -3489,9 +3497,7 @@ function getContentRange(start, end) {
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
@ -4866,7 +4872,13 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
return new URL(proxyVar);
try {
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
}
else {
return undefined;
@ -4877,6 +4889,10 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@ -4902,13 +4918,24 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
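checkBypass above now short-circuits loopback hosts (localhost, 127.x, [::1]) and honors three NO_PROXY forms: an exact host, a bare `*` wildcard, and suffix entries with or without a leading dot. A reduced sketch of the match (the real code also compares host:port variants):

// NO_PROXY=example.com   bypasses example.com and api.example.com
// NO_PROXY=.example.com  bypasses api.example.com (suffix match)
// NO_PROXY=*             bypasses every host
function bypassesProxy(host, noProxy) {
    const upperHost = host.toUpperCase();
    return noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)
        .some(item => item === '*' ||
            upperHost === item ||
            upperHost.endsWith(`.${item}`) ||
            (item.startsWith('.') && upperHost.endsWith(item)));
}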
//# sourceMappingURL=proxy.js.map

/***/ }),
@ -5557,7 +5584,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(__webpack_require__(470));
const http_client_1 = __webpack_require__(425);
const storage_blob_1 = __webpack_require__(373);
@ -5714,6 +5741,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
* Download the cache using the Actions toolkit http-client concurrently
*
* @param archiveLocation the URL for the cache
* @param archivePath the local path where the cache is saved
*/
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
socketTimeout: options.timeoutInMs,
keepAlive: true
});
try {
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
const lengthHeader = res.message.headers['content-length'];
if (lengthHeader === undefined || lengthHeader === null) {
throw new Error('Content-Length not found on blob response');
}
const length = parseInt(lengthHeader);
if (Number.isNaN(length)) {
throw new Error(`Could not interpret Content-Length: ${length}`);
}
const downloads = [];
const blockSize = 4 * 1024 * 1024;
for (let offset = 0; offset < length; offset += blockSize) {
const count = Math.min(blockSize, length - offset);
downloads.push({
offset,
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
})
});
}
// reverse to use .pop instead of .shift
downloads.reverse();
let actives = 0;
let bytesDownloaded = 0;
const progress = new DownloadProgress(length);
progress.startDisplayTimer();
const progressFn = progress.onProgress();
const activeDownloads = [];
let nextDownload;
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
const segment = yield Promise.race(Object.values(activeDownloads));
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
actives--;
delete activeDownloads[segment.offset];
bytesDownloaded += segment.count;
progressFn({ loadedBytes: bytesDownloaded });
});
while ((nextDownload = downloads.pop())) {
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
actives++;
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
yield waitAndWrite();
}
}
while (actives > 0) {
yield waitAndWrite();
}
}
finally {
httpClient.dispose();
yield archiveDescriptor.close();
}
});
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const retries = 5;
let failures = 0;
while (true) {
try {
const timeout = 30000;
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
if (typeof result === 'string') {
throw new Error('downloadSegmentRetry failed due to timeout');
}
return result;
}
catch (err) {
if (failures >= retries) {
throw err;
}
failures++;
}
}
});
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
return yield httpClient.get(archiveLocation, {
Range: `bytes=${offset}-${offset + count - 1}`
});
}));
if (!partRes.readBodyBuffer) {
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
}
return {
offset,
count,
buffer: yield partRes.readBodyBuffer()
};
});
}
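downloadCacheHttpClientConcurrent above sizes the archive with a HEAD request, splits it into 4 MiB ranges, and keeps up to options.downloadConcurrency (10 if unset here) range requests in flight, writing each finished segment at its own file offset; downloadSegmentRetry adds a 30 s timeout and up to five retries per segment. The bounded-concurrency core, reduced to its shape (sketch; names hypothetical):

// Keep at most `limit` segment downloads active; whichever finishes first
// (Promise.race) is written at its offset, then the window is refilled.
async function downloadInSegments(fetchSegment, segments, write, limit = 10) {
    const active = {}; // offset -> in-flight promise resolving {offset, count, buffer}
    let inFlight = 0;
    const drainOne = async () => {
        const done = await Promise.race(Object.values(active));
        await write(done.buffer, done.offset);
        delete active[done.offset];
        inFlight--;
    };
    for (const seg of segments) {
        active[seg.offset] = fetchSegment(seg.offset, seg.count);
        if (++inFlight >= limit) {
            await drainOne();
        }
    }
    while (inFlight > 0) {
        await drainOne();
    }
}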
/**
* Download the cache using the Azure Storage SDK. Only call this method if the
* URL points to an Azure Storage endpoint.
@ -7136,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@ -7146,6 +7285,31 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}

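makeSafeRe above bounds the unbounded quantifiers that exposed these semver regexes to catastrophic backtracking (ReDoS): every `token*` becomes `token{0,max}` and every `token+` becomes `token{1,max}` for the three tokens listed in safeRegexReplacements. For example (assuming semver's usual MAX_LENGTH of 256, so MAX_SAFE_BUILD_LENGTH would be 250):

makeSafeRe('(\\s*)~>?\\s+')
// => '(\\s{0,1})~>?\\s{1,1}'
makeSafeRe('[a-zA-Z0-9-]+')
// => '[a-zA-Z0-9-]{1,250}'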
// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

@ -7155,14 +7319,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'

// ## Main Version
// Three dot-separated numeric identifiers.
@ -7204,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@ -7284,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@ -7293,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@ -7308,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@ -7329,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@ -7357,6 +7525,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@ -7381,7 +7557,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? re[t.LOOSE] : re[t.FULL]
var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) {
return null
}
@ -7436,7 +7612,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@ -7881,6 +8057,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@ -7897,7 +8074,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@ -8021,9 +8198,16 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First, split based on boolean or ||
// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@ -8031,7 +8215,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range)
throw new TypeError('Invalid SemVer Range: ' + this.raw)
}

this.format()
@ -8050,20 +8234,19 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@ -8071,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@ -8171,7 +8354,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@ -8212,7 +8395,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@ -8271,7 +8454,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@ -8346,7 +8529,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '')
return comp.trim().replace(safeRe[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@ -8672,7 +8855,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
match = version.match(safeRe[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@ -8683,17 +8866,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = re[t.COERCERTL].exec(version)) &&
while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
safeRe[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@ -35745,6 +35928,19 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
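readBodyBuffer above is the new HttpClientResponse method the concurrent path depends on: it accumulates the raw response stream as Buffer chunks instead of decoding to a string, so each ranged segment can be written back byte-for-byte. Usage shape (sketch, mirroring downloadSegment earlier in this diff):

async function fetchRange(httpClient, url, offset, count) {
    const res = await httpClient.get(url, {
        Range: `bytes=${offset}-${offset + count - 1}`
    });
    return res.readBodyBuffer(); // resolves to Buffer.concat(chunks)
}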
@ -40281,7 +40477,8 @@ exports.getUploadOptions = getUploadOptions;
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
useAzureSdk: false,
concurrentBlobDownloads: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
@ -40291,6 +40488,9 @@ function getDownloadOptions(copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
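The getDownloadOptions hunks above flip the default strategy: useAzureSdk now defaults to false and the new concurrentBlobDownloads flag defaults to true, so Azure-hosted caches are fetched through the toolkit http-client unless a caller opts back in. A caller wanting the old behavior would pass something like (hypothetical usage):

const options = getDownloadOptions({
    useAzureSdk: true,              // default is now false
    concurrentBlobDownloads: false, // default is now true
    downloadConcurrency: 8,
    timeoutInMs: 30000
});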
@ -47453,28 +47653,9 @@ module.exports = function(dst, src) {

"use strict";

var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const restoreImpl_1 = __importDefault(__webpack_require__(835));
const stateProvider_1 = __webpack_require__(309);
function run() {
return __awaiter(this, void 0, void 0, function* () {
yield (0, restoreImpl_1.default)(new stateProvider_1.StateProvider());
});
}
run();
exports.default = run;
const restoreImpl_1 = __webpack_require__(835);
(0, restoreImpl_1.restoreRun)(true);


/***/ }),
@ -49096,9 +49277,11 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.restoreRun = exports.restoreOnlyRun = exports.restoreImpl = void 0;
const cache = __importStar(__webpack_require__(692));
const core = __importStar(__webpack_require__(470));
const constants_1 = __webpack_require__(694);
const stateProvider_1 = __webpack_require__(309);
const utils = __importStar(__webpack_require__(443));
function restoreImpl(stateProvider) {
return __awaiter(this, void 0, void 0, function* () {
@ -49149,7 +49332,40 @@ function restoreImpl(stateProvider) {
}
});
}
exports.default = restoreImpl;
exports.restoreImpl = restoreImpl;
function run(stateProvider, earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
try {
yield restoreImpl(stateProvider);
}
catch (err) {
console.error(err);
if (earlyExit) {
process.exit(1);
}
}
// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
if (earlyExit) {
process.exit(0);
}
});
}
function restoreOnlyRun(earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
yield run(new stateProvider_1.NullStateProvider(), earlyExit);
});
}
exports.restoreOnlyRun = restoreOnlyRun;
function restoreRun(earlyExit) {
return __awaiter(this, void 0, void 0, function* () {
yield run(new stateProvider_1.StateProvider(), earlyExit);
});
}
exports.restoreRun = restoreRun;


/***/ }),
@ -50089,14 +50305,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = {};
obj = Object.create(null);
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {};
obj[attrkey] = Object.create(null);
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@ -50146,7 +50362,11 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@ -50170,7 +50390,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = {};
node = Object.create(null);
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@ -50185,7 +50405,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {};
objClone = Object.create(null);
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@ -50202,7 +50422,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = {};
obj = Object.create(null);
obj[nodeName] = old;
}
_this.resultObject = obj;
@ -52404,7 +52624,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@ -52412,12 +52632,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@ -54815,7 +55035,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString(),
value: headerValue.toString().trim(),
};
}
/**
@ -54955,7 +55175,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.0",
coreHttpVersion: "3.0.4",
/**
* Specifies HTTP.
*/
@ -55033,13 +55253,6 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@ -59720,7 +59933,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) {
if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@ -59752,7 +59965,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) {
if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@ -59765,7 +59978,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) {
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@ -59896,10 +60109,7 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
return options.credentialScopes;
}
if (baseUri) {
return `${baseUri}/.default`;
@ -60132,6 +60342,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@ -60171,7 +60385,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;

343
dist/save-only/index.js
vendored
@ -1183,17 +1183,19 @@ function getArchiveFileSizeInBytes(filePath) {
|
||||
}
|
||||
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
|
||||
function resolvePaths(patterns) {
|
||||
var e_1, _a;
|
||||
var _b;
|
||||
var _a, e_1, _b, _c;
|
||||
var _d;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const paths = [];
|
||||
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
|
||||
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
|
||||
const globber = yield glob.create(patterns.join('\n'), {
|
||||
implicitDescendants: false
|
||||
});
|
||||
try {
|
||||
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
|
||||
const file = _d.value;
|
||||
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
|
||||
_c = _g.value;
|
||||
_e = false;
|
||||
const file = _c;
|
||||
const relativeFile = path
|
||||
.relative(workspace, file)
|
||||
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
|
||||
@ -1211,7 +1213,7 @@ function resolvePaths(patterns) {
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
|
||||
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
@ -3438,7 +3440,8 @@ function createHttpClient() {
|
||||
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
|
||||
}
|
||||
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
|
||||
const components = paths;
|
||||
// don't pass changes upstream
|
||||
const components = paths.slice();
|
||||
// Add compression method to cache version to restore
|
||||
// compressed cache as per compression method
|
||||
if (compressionMethod) {
|
||||
@ -3450,10 +3453,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
|
||||
}
|
||||
// Add salt to cache version to support breaking changes in cache entry
|
||||
components.push(versionSalt);
|
||||
return crypto
|
||||
.createHash('sha256')
|
||||
.update(components.join('|'))
|
||||
.digest('hex');
|
||||
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
|
||||
}
|
||||
exports.getCacheVersion = getCacheVersion;
|
||||
function getCacheEntry(keys, paths, options) {
|
||||
@ -3506,13 +3506,21 @@ function downloadCache(archiveLocation, archivePath, options) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const archiveUrl = new url_1.URL(archiveLocation);
|
||||
const downloadOptions = (0, options_1.getDownloadOptions)(options);
|
||||
if (downloadOptions.useAzureSdk &&
|
||||
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
|
||||
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
|
||||
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
|
||||
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
|
||||
if (downloadOptions.useAzureSdk) {
|
||||
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
|
||||
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
|
||||
}
|
||||
else if (downloadOptions.concurrentBlobDownloads) {
|
||||
// Use concurrent implementation with HttpClient to work around blob SDK issue
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
|
||||
}
|
||||
else {
|
||||
// Otherwise, download using the Actions http-client.
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Otherwise, download using the Actions http-client.
|
||||
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
|
||||
}
|
||||
});
|
||||
@ -3545,9 +3553,7 @@ function getContentRange(start, end) {
|
||||
}
|
||||
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
core.debug(`Uploading chunk of size ${end -
|
||||
start +
|
||||
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
|
||||
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
|
||||
const additionalHeaders = {
|
||||
'Content-Type': 'application/octet-stream',
|
||||
'Content-Range': getContentRange(start, end)
|
||||
@ -4922,7 +4928,13 @@ function getProxyUrl(reqUrl) {
|
||||
}
|
||||
})();
|
||||
if (proxyVar) {
|
||||
return new URL(proxyVar);
|
||||
try {
|
||||
return new URL(proxyVar);
|
||||
}
|
||||
catch (_a) {
|
||||
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
|
||||
return new URL(`http://${proxyVar}`);
|
||||
}
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
@ -4933,6 +4945,10 @@ function checkBypass(reqUrl) {
|
||||
if (!reqUrl.hostname) {
|
||||
return false;
|
||||
}
|
||||
const reqHost = reqUrl.hostname;
|
||||
if (isLoopbackAddress(reqHost)) {
|
||||
return true;
|
||||
}
|
||||
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
|
||||
if (!noProxy) {
|
||||
return false;
|
||||
@ -4958,13 +4974,24 @@ function checkBypass(reqUrl) {
|
||||
.split(',')
|
||||
.map(x => x.trim().toUpperCase())
|
||||
.filter(x => x)) {
|
||||
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
|
||||
if (upperNoProxyItem === '*' ||
|
||||
upperReqHosts.some(x => x === upperNoProxyItem ||
|
||||
x.endsWith(`.${upperNoProxyItem}`) ||
|
||||
(upperNoProxyItem.startsWith('.') &&
|
||||
x.endsWith(`${upperNoProxyItem}`)))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
exports.checkBypass = checkBypass;
|
||||
function isLoopbackAddress(host) {
|
||||
const hostLower = host.toLowerCase();
|
||||
return (hostLower === 'localhost' ||
|
||||
hostLower.startsWith('127.') ||
|
||||
hostLower.startsWith('[::1]') ||
|
||||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
|
||||
}
|
||||
//# sourceMappingURL=proxy.js.map
|
||||
|
||||
/***/ }),
|
||||
@ -5613,7 +5640,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
});
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
|
||||
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
|
||||
const core = __importStar(__webpack_require__(470));
|
||||
const http_client_1 = __webpack_require__(425);
|
||||
const storage_blob_1 = __webpack_require__(373);
|
||||
@ -5770,6 +5797,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
|
||||
});
|
||||
}
|
||||
exports.downloadCacheHttpClient = downloadCacheHttpClient;
|
||||
/**
|
||||
* Download the cache using the Actions toolkit http-client concurrently
|
||||
*
|
||||
* @param archiveLocation the URL for the cache
|
||||
* @param archivePath the local path where the cache is saved
|
||||
*/
|
||||
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
|
||||
var _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
|
||||
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
|
||||
socketTimeout: options.timeoutInMs,
|
||||
keepAlive: true
|
||||
});
|
||||
try {
|
||||
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
|
||||
const lengthHeader = res.message.headers['content-length'];
|
||||
if (lengthHeader === undefined || lengthHeader === null) {
|
||||
throw new Error('Content-Length not found on blob response');
|
||||
}
|
||||
const length = parseInt(lengthHeader);
|
||||
if (Number.isNaN(length)) {
|
||||
throw new Error(`Could not interpret Content-Length: ${length}`);
|
||||
}
|
||||
const downloads = [];
|
||||
const blockSize = 4 * 1024 * 1024;
|
||||
for (let offset = 0; offset < length; offset += blockSize) {
|
||||
const count = Math.min(blockSize, length - offset);
|
||||
downloads.push({
|
||||
offset,
|
||||
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
|
||||
})
|
||||
});
|
||||
}
|
||||
// reverse to use .pop instead of .shift
|
||||
downloads.reverse();
|
||||
let actives = 0;
|
||||
let bytesDownloaded = 0;
|
||||
const progress = new DownloadProgress(length);
|
||||
progress.startDisplayTimer();
|
||||
const progressFn = progress.onProgress();
|
||||
const activeDownloads = [];
|
||||
let nextDownload;
|
||||
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
|
||||
const segment = yield Promise.race(Object.values(activeDownloads));
|
||||
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
|
||||
actives--;
|
||||
delete activeDownloads[segment.offset];
|
||||
bytesDownloaded += segment.count;
|
||||
progressFn({ loadedBytes: bytesDownloaded });
|
||||
});
|
||||
while ((nextDownload = downloads.pop())) {
|
||||
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
|
||||
actives++;
|
||||
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
|
||||
yield waitAndWrite();
|
||||
}
|
||||
}
|
||||
while (actives > 0) {
|
||||
yield waitAndWrite();
|
||||
}
|
||||
}
|
||||
finally {
|
||||
httpClient.dispose();
|
||||
yield archiveDescriptor.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
|
||||
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const retries = 5;
|
||||
let failures = 0;
|
||||
while (true) {
|
||||
try {
|
||||
const timeout = 30000;
|
||||
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
|
||||
if (typeof result === 'string') {
|
||||
throw new Error('downloadSegmentRetry failed due to timeout');
|
||||
}
|
||||
return result;
|
||||
}
|
||||
catch (err) {
|
||||
if (failures >= retries) {
|
||||
throw err;
|
||||
}
|
||||
failures++;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function downloadSegment(httpClient, archiveLocation, offset, count) {
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
|
||||
return yield httpClient.get(archiveLocation, {
|
||||
Range: `bytes=${offset}-${offset + count - 1}`
|
||||
});
|
||||
}));
|
||||
if (!partRes.readBodyBuffer) {
|
||||
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
|
||||
}
|
||||
return {
|
||||
offset,
|
||||
count,
|
||||
buffer: yield partRes.readBodyBuffer()
|
||||
};
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Download the cache using the Azure Storage SDK. Only call this method if the
|
||||
* URL points to an Azure Storage endpoint.
|
||||
@ -7192,8 +7328,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
|
||||
// Max safe segment length for coercion.
|
||||
var MAX_SAFE_COMPONENT_LENGTH = 16
|
||||
|
||||
var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6
|
||||
|
||||
// The actual regexps go on exports.re
|
||||
var re = exports.re = []
|
||||
var safeRe = exports.safeRe = []
|
||||
var src = exports.src = []
|
||||
var t = exports.tokens = {}
|
||||
var R = 0
|
||||
@ -7202,6 +7341,31 @@ function tok (n) {
|
||||
t[n] = R++
|
||||
}
|
||||
|
||||
var LETTERDASHNUMBER = '[a-zA-Z0-9-]'
|
||||
|
||||
// Replace some greedy regex tokens to prevent regex dos issues. These regex are
|
||||
// used internally via the safeRe object since all inputs in this library get
|
||||
// normalized first to trim and collapse all extra whitespace. The original
|
||||
// regexes are exported for userland consumption and lower level usage. A
|
||||
// future breaking change could export the safer regex only with a note that
|
||||
// all input should have extra whitespace removed.
|
||||
var safeRegexReplacements = [
|
||||
['\\s', 1],
|
||||
['\\d', MAX_LENGTH],
|
||||
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
|
||||
]
|
||||
|
||||
function makeSafeRe (value) {
|
||||
for (var i = 0; i < safeRegexReplacements.length; i++) {
|
||||
var token = safeRegexReplacements[i][0]
|
||||
var max = safeRegexReplacements[i][1]
|
||||
value = value
|
||||
.split(token + '*').join(token + '{0,' + max + '}')
|
||||
.split(token + '+').join(token + '{1,' + max + '}')
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
// The following Regular Expressions can be used for tokenizing,
|
||||
// validating, and parsing SemVer version strings.
|
||||
|
||||
@ -7211,14 +7375,14 @@ function tok (n) {
|
||||
tok('NUMERICIDENTIFIER')
|
||||
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
|
||||
tok('NUMERICIDENTIFIERLOOSE')
|
||||
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
|
||||
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'
|
||||
|
||||
// ## Non-numeric Identifier
|
||||
// Zero or more digits, followed by a letter or hyphen, and then zero or
|
||||
// more letters, digits, or hyphens.
|
||||
|
||||
tok('NONNUMERICIDENTIFIER')
|
||||
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
|
||||
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'
|
||||
|
||||
// ## Main Version
|
||||
// Three dot-separated numeric identifiers.
|
||||
@@ -7260,7 +7424,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@@ -7340,6 +7504,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -7349,6 +7514,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@@ -7364,6 +7530,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@@ -7385,6 +7552,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@@ -7413,6 +7581,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@@ -7437,7 +7613,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? re[t.LOOSE] : re[t.FULL]
var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) {
return null
}
@@ -7492,7 +7668,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@@ -7937,6 +8113,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@@ -7953,7 +8130,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@@ -8077,9 +8254,16 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First, split based on boolean or ||
// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@@ -8087,7 +8271,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range)
throw new TypeError('Invalid SemVer Range: ' + this.raw)
}

this.format()
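The `Range` constructor now collapses whitespace once, up front, instead of leaning on `\s*`-heavy regexes. A one-line sketch of what that normalization produces (illustrative range string):

```typescript
// Collapse runs of whitespace, then split on "||" as the hunk above does.
const raw = "  1.2.3   ||   >=2.0.0  ".trim().split(/\s+/).join(" ");
console.log(raw);                                // "1.2.3 || >=2.0.0"
console.log(raw.split("||").map(r => r.trim())); // [ "1.2.3", ">=2.0.0" ]
```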
@@ -8106,20 +8290,19 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@@ -8127,7 +8310,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@@ -8227,7 +8410,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@@ -8268,7 +8451,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@@ -8327,7 +8510,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@@ -8402,7 +8585,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '')
return comp.trim().replace(safeRe[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8728,7 +8911,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
match = version.match(safeRe[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@@ -8739,17 +8922,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = re[t.COERCERTL].exec(version)) &&
while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
safeRe[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@@ -35796,6 +35979,19 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
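The new `readBodyBuffer` drains the response stream into a single `Buffer`. A self-contained sketch of the same pattern, assuming any Node `Readable` in place of the client's response message:

```typescript
import { Readable } from "stream";

// Collect all chunks of a readable stream into one Buffer,
// mirroring the readBodyBuffer method above.
async function readBodyBuffer(message: Readable): Promise<Buffer> {
  const chunks: Buffer[] = [];
  for await (const chunk of message) {
    chunks.push(chunk as Buffer);
  }
  return Buffer.concat(chunks);
}

readBodyBuffer(Readable.from([Buffer.from("he"), Buffer.from("llo")]))
  .then(buf => console.log(buf.toString())); // "hello"
```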
@@ -40422,7 +40618,8 @@ exports.getUploadOptions = getUploadOptions;
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
useAzureSdk: false,
concurrentBlobDownloads: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
@@ -40432,6 +40629,9 @@ function getDownloadOptions(copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
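The defaults above flip `useAzureSdk` off and make concurrent http-client downloads the default. A sketch of the merge logic with a hypothetical local `DownloadOptions` type (the real interface lives in `@actions/cache` and has more fields):

```typescript
interface DownloadOptions {
  useAzureSdk: boolean;
  concurrentBlobDownloads: boolean;
  downloadConcurrency: number;
  timeoutInMs: number;
  segmentTimeoutInMs: number;
}

// Start from the new defaults, then copy over any explicitly typed overrides.
function getDownloadOptions(copy: Partial<DownloadOptions> = {}): DownloadOptions {
  const result: DownloadOptions = {
    useAzureSdk: false,            // the Azure SDK path is now opt-in
    concurrentBlobDownloads: true, // concurrent http-client path by default
    downloadConcurrency: 8,
    timeoutInMs: 30000,
    segmentTimeoutInMs: 600000
  };
  if (typeof copy.useAzureSdk === "boolean") result.useAzureSdk = copy.useAzureSdk;
  if (typeof copy.concurrentBlobDownloads === "boolean")
    result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
  if (typeof copy.downloadConcurrency === "number")
    result.downloadConcurrency = copy.downloadConcurrency;
  return result;
}

// Opting back into the Azure SDK download path:
console.log(getDownloadOptions({ useAzureSdk: true }).useAzureSdk); // true
```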
@@ -50112,14 +50312,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = {};
obj = Object.create(null);
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {};
obj[attrkey] = Object.create(null);
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50169,7 +50369,11 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@@ -50193,7 +50397,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = {};
node = Object.create(null);
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -50208,7 +50412,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {};
objClone = Object.create(null);
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@@ -50225,7 +50429,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = {};
obj = Object.create(null);
obj[nodeName] = old;
}
_this.resultObject = obj;
@@ -52427,7 +52631,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@@ -52435,12 +52639,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -54838,7 +55042,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString(),
value: headerValue.toString().trim(),
};
}
/**
@@ -54978,7 +55182,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.0",
coreHttpVersion: "3.0.4",
/**
* Specifies HTTP.
*/
@@ -55056,13 +55260,6 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@@ -59743,7 +59940,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) {
if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@@ -59775,7 +59972,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) {
if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@@ -59788,7 +59985,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) {
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@@ -59919,10 +60116,7 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
return options.credentialScopes;
}
if (baseUri) {
return `${baseUri}/.default`;
@@ -60155,6 +60349,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@@ -60194,7 +60392,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;
343 dist/save/index.js vendored
@@ -1127,17 +1127,19 @@ function getArchiveFileSizeInBytes(filePath) {
}
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var e_1, _a;
var _b;
var _a, e_1, _b, _c;
var _d;
return __awaiter(this, void 0, void 0, function* () {
const paths = [];
const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
const globber = yield glob.create(patterns.join('\n'), {
implicitDescendants: false
});
try {
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
const file = _d.value;
for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) {
_c = _g.value;
_e = false;
const file = _c;
const relativeFile = path
.relative(workspace, file)
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
@@ -1155,7 +1157,7 @@ function resolvePaths(patterns) {
catch (e_1_1) { e_1 = { error: e_1_1 }; }
finally {
try {
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
}
finally { if (e_1) throw e_1.error; }
}
@@ -3382,7 +3384,8 @@ function createHttpClient() {
return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
}
function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
const components = paths;
// don't pass changes upstream
const components = paths.slice();
// Add compression method to cache version to restore
// compressed cache as per compression method
if (compressionMethod) {
@@ -3394,10 +3397,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
}
// Add salt to cache version to support breaking changes in cache entry
components.push(versionSalt);
return crypto
.createHash('sha256')
.update(components.join('|'))
.digest('hex');
return crypto.createHash('sha256').update(components.join('|')).digest('hex');
}
exports.getCacheVersion = getCacheVersion;
function getCacheEntry(keys, paths, options) {
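`getCacheVersion` now copies `paths` before appending version components, so the caller's array is never mutated. A sketch with an illustrative salt value (the real `versionSalt` constant is defined elsewhere in the bundle):

```typescript
import * as crypto from "crypto";

function getCacheVersion(paths: string[], compressionMethod?: string): string {
  const components = paths.slice(); // don't pass changes upstream
  if (compressionMethod) components.push(compressionMethod);
  components.push("1.0"); // stand-in for the real versionSalt
  return crypto.createHash("sha256").update(components.join("|")).digest("hex");
}

const paths = ["~/.npm"];
getCacheVersion(paths, "zstd");
console.log(paths); // still ["~/.npm"] — pushing onto `paths` itself would have
                    // leaked the compression method and salt into the caller's input
```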
@@ -3450,13 +3450,21 @@ function downloadCache(archiveLocation, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
const archiveUrl = new url_1.URL(archiveLocation);
const downloadOptions = (0, options_1.getDownloadOptions)(options);
if (downloadOptions.useAzureSdk &&
archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
if (downloadOptions.useAzureSdk) {
// Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
}
else if (downloadOptions.concurrentBlobDownloads) {
// Use concurrent implementation with HttpClient to work around blob SDK issue
yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
}
else {
// Otherwise, download using the Actions http-client.
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
}
});
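The branching above now checks the blob hostname first and only then picks a transport. A compact sketch of the decision, using a hypothetical strategy label for illustration:

```typescript
type DownloadStrategy = "azure-sdk" | "http-concurrent" | "http";

function chooseStrategy(
  hostname: string,
  opts: { useAzureSdk: boolean; concurrentBlobDownloads: boolean }
): DownloadStrategy {
  if (hostname.endsWith(".blob.core.windows.net")) {
    if (opts.useAzureSdk) return "azure-sdk";                   // opt-in SDK path
    if (opts.concurrentBlobDownloads) return "http-concurrent"; // new default
  }
  return "http"; // plain Actions http-client everywhere else
}

console.log(
  chooseStrategy("account.blob.core.windows.net",
    { useAzureSdk: false, concurrentBlobDownloads: true })
); // "http-concurrent"
```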
@@ -3489,9 +3497,7 @@ function getContentRange(start, end) {
}
function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const additionalHeaders = {
'Content-Type': 'application/octet-stream',
'Content-Range': getContentRange(start, end)
@@ -4866,7 +4872,13 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
return new URL(proxyVar);
try {
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
}
else {
return undefined;
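The `try`/`catch` above handles proxy values without a scheme, which are not valid WHATWG URLs. A sketch with a hypothetical proxy value:

```typescript
function parseProxy(proxyVar: string): URL | undefined {
  try {
    return new URL(proxyVar); // succeeds when a scheme is present
  } catch {
    // A bare host:port such as an IP address throws, so retry with
    // an http:// prefix, as the hunk above does.
    if (!proxyVar.startsWith("http://") && !proxyVar.startsWith("https://")) {
      return new URL(`http://${proxyVar}`);
    }
  }
  return undefined;
}

console.log(parseProxy("10.0.0.1:8080")?.href); // "http://10.0.0.1:8080/"
```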
@@ -4877,6 +4889,10 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@@ -4902,13 +4918,24 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
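`checkBypass` now honors `*` and domain-suffix entries in `NO_PROXY`. A reduced sketch of the matching rules for a single hostname (the real code also checks host:port variants):

```typescript
function bypasses(noProxy: string, host: string): boolean {
  const upperHost = host.toUpperCase();
  return noProxy
    .split(",")
    .map(x => x.trim().toUpperCase())
    .filter(x => x)
    .some(item =>
      item === "*" ||
      upperHost === item ||
      upperHost.endsWith(`.${item}`) ||
      (item.startsWith(".") && upperHost.endsWith(item))
    );
}

console.log(bypasses("example.com", "api.example.com")); // true (subdomain)
console.log(bypasses(".internal", "build.internal"));    // true (leading dot)
console.log(bypasses("*", "anything.test"));             // true (wildcard)
```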
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map

/***/ }),
@@ -5557,7 +5584,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
const core = __importStar(__webpack_require__(470));
const http_client_1 = __webpack_require__(425);
const storage_blob_1 = __webpack_require__(373);
@@ -5714,6 +5741,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
});
}
exports.downloadCacheHttpClient = downloadCacheHttpClient;
/**
 * Download the cache using the Actions toolkit http-client concurrently
 *
 * @param archiveLocation the URL for the cache
 * @param archivePath the local path where the cache is saved
 */
function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
var _a;
return __awaiter(this, void 0, void 0, function* () {
const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
socketTimeout: options.timeoutInMs,
keepAlive: true
});
try {
const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
const lengthHeader = res.message.headers['content-length'];
if (lengthHeader === undefined || lengthHeader === null) {
throw new Error('Content-Length not found on blob response');
}
const length = parseInt(lengthHeader);
if (Number.isNaN(length)) {
throw new Error(`Could not interpret Content-Length: ${length}`);
}
const downloads = [];
const blockSize = 4 * 1024 * 1024;
for (let offset = 0; offset < length; offset += blockSize) {
const count = Math.min(blockSize, length - offset);
downloads.push({
offset,
promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
})
});
}
// reverse to use .pop instead of .shift
downloads.reverse();
let actives = 0;
let bytesDownloaded = 0;
const progress = new DownloadProgress(length);
progress.startDisplayTimer();
const progressFn = progress.onProgress();
const activeDownloads = [];
let nextDownload;
const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
const segment = yield Promise.race(Object.values(activeDownloads));
yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
actives--;
delete activeDownloads[segment.offset];
bytesDownloaded += segment.count;
progressFn({ loadedBytes: bytesDownloaded });
});
while ((nextDownload = downloads.pop())) {
activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
actives++;
if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
yield waitAndWrite();
}
}
while (actives > 0) {
yield waitAndWrite();
}
}
finally {
httpClient.dispose();
yield archiveDescriptor.close();
}
});
}
exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
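The concurrent downloader above slices the archive into fixed-size parts and fetches them with `Range` requests. A sketch of the segmentation arithmetic, assuming the same 4 MiB block size (the `segments` helper name is illustrative):

```typescript
const blockSize = 4 * 1024 * 1024; // 4 MiB, as in the code above

function segments(length: number): Array<{ offset: number; count: number }> {
  const out: Array<{ offset: number; count: number }> = [];
  for (let offset = 0; offset < length; offset += blockSize) {
    out.push({ offset, count: Math.min(blockSize, length - offset) });
  }
  return out;
}

// A 10 MiB archive becomes two 4 MiB parts plus a 2 MiB tail, each fetched
// with a `Range: bytes=offset-(offset+count-1)` header and written back at
// its offset, so parts can complete out of order.
console.log(segments(10 * 1024 * 1024));
```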
function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const retries = 5;
let failures = 0;
while (true) {
try {
const timeout = 30000;
const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
if (typeof result === 'string') {
throw new Error('downloadSegmentRetry failed due to timeout');
}
return result;
}
catch (err) {
if (failures >= retries) {
throw err;
}
failures++;
}
}
});
}
function downloadSegment(httpClient, archiveLocation, offset, count) {
return __awaiter(this, void 0, void 0, function* () {
const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
return yield httpClient.get(archiveLocation, {
Range: `bytes=${offset}-${offset + count - 1}`
});
}));
if (!partRes.readBodyBuffer) {
throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
}
return {
offset,
count,
buffer: yield partRes.readBodyBuffer()
};
});
}
/**
 * Download the cache using the Azure Storage SDK. Only call this method if the
 * URL points to an Azure Storage endpoint.
@@ -7136,8 +7272,11 @@ var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER ||
// Max safe segment length for coercion.
var MAX_SAFE_COMPONENT_LENGTH = 16

var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6

// The actual regexps go on exports.re
var re = exports.re = []
var safeRe = exports.safeRe = []
var src = exports.src = []
var t = exports.tokens = {}
var R = 0
@@ -7146,6 +7285,31 @@ function tok (n) {
t[n] = R++
}

var LETTERDASHNUMBER = '[a-zA-Z0-9-]'

// Replace some greedy regex tokens to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
var safeRegexReplacements = [
['\\s', 1],
['\\d', MAX_LENGTH],
[LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH],
]

function makeSafeRe (value) {
for (var i = 0; i < safeRegexReplacements.length; i++) {
var token = safeRegexReplacements[i][0]
var max = safeRegexReplacements[i][1]
value = value
.split(token + '*').join(token + '{0,' + max + '}')
.split(token + '+').join(token + '{1,' + max + '}')
}
return value
}

// The following Regular Expressions can be used for tokenizing,
// validating, and parsing SemVer version strings.

@@ -7155,14 +7319,14 @@ function tok (n) {
tok('NUMERICIDENTIFIER')
src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*'
tok('NUMERICIDENTIFIERLOOSE')
src[t.NUMERICIDENTIFIERLOOSE] = '[0-9]+'
src[t.NUMERICIDENTIFIERLOOSE] = '\\d+'

// ## Non-numeric Identifier
// Zero or more digits, followed by a letter or hyphen, and then zero or
// more letters, digits, or hyphens.

tok('NONNUMERICIDENTIFIER')
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*'

// ## Main Version
// Three dot-separated numeric identifiers.
@@ -7204,7 +7368,7 @@ src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] +
// Any combination of digits, letters, or hyphens.

tok('BUILDIDENTIFIER')
src[t.BUILDIDENTIFIER] = '[0-9A-Za-z-]+'
src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+'

// ## Build Metadata
// Plus sign, followed by one or more period-separated build metadata
@@ -7284,6 +7448,7 @@ src[t.COERCE] = '(^|[^\\d])' +
'(?:$|[^\\d])'
tok('COERCERTL')
re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g')
safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g')

// Tilde ranges.
// Meaning is "reasonably at or greater than"
@@ -7293,6 +7458,7 @@ src[t.LONETILDE] = '(?:~>?)'
tok('TILDETRIM')
src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+'
re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g')
safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g')
var tildeTrimReplace = '$1~'

tok('TILDE')
@@ -7308,6 +7474,7 @@ src[t.LONECARET] = '(?:\\^)'
tok('CARETTRIM')
src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+'
re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g')
safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g')
var caretTrimReplace = '$1^'

tok('CARET')
@@ -7329,6 +7496,7 @@ src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] +

// this one has to use the /g flag
re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g')
safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g')
var comparatorTrimReplace = '$1$2$3'

// Something like `1.2.3 - 1.2.4`
@@ -7357,6 +7525,14 @@ for (var i = 0; i < R; i++) {
debug(i, src[i])
if (!re[i]) {
re[i] = new RegExp(src[i])

// Replace all greedy whitespace to prevent regex dos issues. These regex are
// used internally via the safeRe object since all inputs in this library get
// normalized first to trim and collapse all extra whitespace. The original
// regexes are exported for userland consumption and lower level usage. A
// future breaking change could export the safer regex only with a note that
// all input should have extra whitespace removed.
safeRe[i] = new RegExp(makeSafeRe(src[i]))
}
}

@@ -7381,7 +7557,7 @@ function parse (version, options) {
return null
}

var r = options.loose ? re[t.LOOSE] : re[t.FULL]
var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]
if (!r.test(version)) {
return null
}
@@ -7436,7 +7612,7 @@ function SemVer (version, options) {
this.options = options
this.loose = !!options.loose

var m = version.trim().match(options.loose ? re[t.LOOSE] : re[t.FULL])
var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL])

if (!m) {
throw new TypeError('Invalid Version: ' + version)
@@ -7881,6 +8057,7 @@ function Comparator (comp, options) {
return new Comparator(comp, options)
}

comp = comp.trim().split(/\s+/).join(' ')
debug('comparator', comp, options)
this.options = options
this.loose = !!options.loose
@@ -7897,7 +8074,7 @@ function Comparator (comp, options) {

var ANY = {}
Comparator.prototype.parse = function (comp) {
var r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var m = comp.match(r)

if (!m) {
@@ -8021,9 +8198,16 @@ function Range (range, options) {
this.loose = !!options.loose
this.includePrerelease = !!options.includePrerelease

// First, split based on boolean or ||
// First reduce all whitespace as much as possible so we do not have to rely
// on potentially slow regexes like \s*. This is then stored and used for
// future error messages as well.
this.raw = range
this.set = range.split(/\s*\|\|\s*/).map(function (range) {
.trim()
.split(/\s+/)
.join(' ')

// First, split based on boolean or ||
this.set = this.raw.split('||').map(function (range) {
return this.parseRange(range.trim())
}, this).filter(function (c) {
// throw out any that are not relevant for whatever reason
@@ -8031,7 +8215,7 @@ function Range (range, options) {
})

if (!this.set.length) {
throw new TypeError('Invalid SemVer Range: ' + range)
throw new TypeError('Invalid SemVer Range: ' + this.raw)
}

this.format()
@@ -8050,20 +8234,19 @@ Range.prototype.toString = function () {

Range.prototype.parseRange = function (range) {
var loose = this.options.loose
range = range.trim()
// `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4`
var hr = loose ? re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE]
var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]
range = range.replace(hr, hyphenReplace)
debug('hyphen replace', range)
// `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5`
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, re[t.COMPARATORTRIM])
range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace)
debug('comparator trim', range, safeRe[t.COMPARATORTRIM])

// `~ 1.2.3` => `~1.2.3`
range = range.replace(re[t.TILDETRIM], tildeTrimReplace)
range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace)

// `^ 1.2.3` => `^1.2.3`
range = range.replace(re[t.CARETTRIM], caretTrimReplace)
range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace)

// normalize spaces
range = range.split(/\s+/).join(' ')
@@ -8071,7 +8254,7 @@ Range.prototype.parseRange = function (range) {
// At this point, the range is completely trimmed and
// ready to be split into comparators.

var compRe = loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR]
var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]
var set = range.split(' ').map(function (comp) {
return parseComparator(comp, this.options)
}, this).join(' ').split(/\s+/)
@@ -8171,7 +8354,7 @@ function replaceTildes (comp, options) {
}

function replaceTilde (comp, options) {
var r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE]
var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]
return comp.replace(r, function (_, M, m, p, pr) {
debug('tilde', comp, _, M, m, p, pr)
var ret
@@ -8212,7 +8395,7 @@ function replaceCarets (comp, options) {

function replaceCaret (comp, options) {
debug('caret', comp, options)
var r = options.loose ? re[t.CARETLOOSE] : re[t.CARET]
var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]
return comp.replace(r, function (_, M, m, p, pr) {
debug('caret', comp, _, M, m, p, pr)
var ret
@@ -8271,7 +8454,7 @@ function replaceXRanges (comp, options) {

function replaceXRange (comp, options) {
comp = comp.trim()
var r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE]
var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]
return comp.replace(r, function (ret, gtlt, M, m, p, pr) {
debug('xRange', comp, ret, gtlt, M, m, p, pr)
var xM = isX(M)
@@ -8346,7 +8529,7 @@ function replaceXRange (comp, options) {
function replaceStars (comp, options) {
debug('replaceStars', comp, options)
// Looseness is ignored here. star is always as loose as it gets!
return comp.trim().replace(re[t.STAR], '')
return comp.trim().replace(safeRe[t.STAR], '')
}

// This function is passed to string.replace(re[t.HYPHENRANGE])
@@ -8672,7 +8855,7 @@ function coerce (version, options) {

var match = null
if (!options.rtl) {
match = version.match(re[t.COERCE])
match = version.match(safeRe[t.COERCE])
} else {
// Find the right-most coercible string that does not share
// a terminus with a more left-ward coercible string.
@@ -8683,17 +8866,17 @@ function coerce (version, options) {
// Stop when we get a match that ends at the string end, since no
// coercible string can be more right-ward without the same terminus.
var next
while ((next = re[t.COERCERTL].exec(version)) &&
while ((next = safeRe[t.COERCERTL].exec(version)) &&
(!match || match.index + match[0].length !== version.length)
) {
if (!match ||
next.index + next[0].length !== match.index + match[0].length) {
match = next
}
re[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length
}
// leave it in a clean state
re[t.COERCERTL].lastIndex = -1
safeRe[t.COERCERTL].lastIndex = -1
}

if (match === null) {
@@ -35740,6 +35923,19 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
@@ -40366,7 +40562,8 @@ exports.getUploadOptions = getUploadOptions;
*/
function getDownloadOptions(copy) {
const result = {
useAzureSdk: true,
useAzureSdk: false,
concurrentBlobDownloads: true,
downloadConcurrency: 8,
timeoutInMs: 30000,
segmentTimeoutInMs: 600000,
@@ -40376,6 +40573,9 @@ function getDownloadOptions(copy) {
if (typeof copy.useAzureSdk === 'boolean') {
result.useAzureSdk = copy.useAzureSdk;
}
if (typeof copy.concurrentBlobDownloads === 'boolean') {
result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
}
if (typeof copy.downloadConcurrency === 'number') {
result.downloadConcurrency = copy.downloadConcurrency;
}
@@ -50085,14 +50285,14 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
this.saxParser.onopentag = (function(_this) {
return function(node) {
var key, newValue, obj, processedKey, ref;
obj = {};
obj = Object.create(null);
obj[charkey] = "";
if (!_this.options.ignoreAttrs) {
ref = node.attributes;
for (key in ref) {
if (!hasProp.call(ref, key)) continue;
if (!(attrkey in obj) && !_this.options.mergeAttrs) {
obj[attrkey] = {};
obj[attrkey] = Object.create(null);
}
newValue = _this.options.attrValueProcessors ? processItem(_this.options.attrValueProcessors, node.attributes[key], key) : node.attributes[key];
processedKey = _this.options.attrNameProcessors ? processItem(_this.options.attrNameProcessors, key) : key;
@@ -50142,7 +50342,11 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
}
if (isEmpty(obj)) {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
if (typeof _this.options.emptyTag === 'function') {
obj = _this.options.emptyTag();
} else {
obj = _this.options.emptyTag !== '' ? _this.options.emptyTag : emptyStr;
}
}
if (_this.options.validator != null) {
xpath = "/" + ((function() {
@@ -50166,7 +50370,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
}
if (_this.options.explicitChildren && !_this.options.mergeAttrs && typeof obj === 'object') {
if (!_this.options.preserveChildrenOrder) {
node = {};
node = Object.create(null);
if (_this.options.attrkey in obj) {
node[_this.options.attrkey] = obj[_this.options.attrkey];
delete obj[_this.options.attrkey];
@@ -50181,7 +50385,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
obj = node;
} else if (s) {
s[_this.options.childkey] = s[_this.options.childkey] || [];
objClone = {};
objClone = Object.create(null);
for (key in obj) {
if (!hasProp.call(obj, key)) continue;
objClone[key] = obj[key];
@@ -50198,7 +50402,7 @@ exports.propagation = propagation_1.PropagationAPI.getInstance();
} else {
if (_this.options.explicitRoot) {
old = obj;
obj = {};
obj = Object.create(null);
obj[nodeName] = old;
}
_this.resultObject = obj;
@@ -52400,7 +52604,7 @@ var CacheFilename;
(function (CacheFilename) {
CacheFilename["Gzip"] = "cache.tgz";
CacheFilename["Zstd"] = "cache.tzst";
})(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {}));
})(CacheFilename || (exports.CacheFilename = CacheFilename = {}));
var CompressionMethod;
(function (CompressionMethod) {
CompressionMethod["Gzip"] = "gzip";
@@ -52408,12 +52612,12 @@ var CompressionMethod;
// This enum is for earlier version of zstd that does not have --long support
CompressionMethod["ZstdWithoutLong"] = "zstd-without-long";
CompressionMethod["Zstd"] = "zstd";
})(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {}));
})(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {}));
var ArchiveToolType;
(function (ArchiveToolType) {
ArchiveToolType["GNU"] = "gnu";
ArchiveToolType["BSD"] = "bsd";
})(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {}));
})(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {}));
// The default number of retry attempts.
exports.DefaultRetryAttempts = 2;
// The default delay in milliseconds between retry attempts.
@@ -54811,7 +55015,7 @@ class HttpHeaders {
set(headerName, headerValue) {
this._headersMap[getHeaderKey(headerName)] = {
name: headerName,
value: headerValue.toString(),
value: headerValue.toString().trim(),
};
}
/**
@@ -54951,7 +55155,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "3.0.0",
coreHttpVersion: "3.0.4",
/**
* Specifies HTTP.
*/
@@ -55029,13 +55233,6 @@ const XML_CHARKEY = "_";

// Copyright (c) Microsoft Corporation.
const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i;
/**
* A constant that indicates whether the environment is node.js or browser based.
*/
const isNode = typeof process !== "undefined" &&
!!process.version &&
!!process.versions &&
!!process.versions.node;
/**
* Encodes an URI.
*
@@ -59716,7 +59913,7 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) {
factories.push(throttlingRetryPolicy());
}
factories.push(deserializationPolicy(options.deserializationContentTypes));
if (isNode) {
if (coreUtil.isNode) {
factories.push(proxyPolicy(options.proxySettings));
}
factories.push(logPolicy({ logger: logger.info }));
@@ -59748,7 +59945,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions);
const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions);
const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions);
if (isNode) {
if (coreUtil.isNode) {
requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions));
}
const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions);
@@ -59761,7 +59958,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) {
requestPolicyFactories.push(authPolicyFactory);
}
requestPolicyFactories.push(logPolicy(loggingOptions));
if (isNode && pipelineOptions.decompressResponse === false) {
if (coreUtil.isNode && pipelineOptions.decompressResponse === false) {
requestPolicyFactories.push(disableResponseDecompressionPolicy());
}
return {
@@ -59892,10 +60089,7 @@ function flattenResponse(_response, responseSpec) {
}
function getCredentialScopes(options, baseUri) {
if (options === null || options === void 0 ? void 0 : options.credentialScopes) {
const scopes = options.credentialScopes;
return Array.isArray(scopes)
? scopes.map((scope) => new URL(scope).toString())
: new URL(scopes).toString();
return options.credentialScopes;
}
if (baseUri) {
return `${baseUri}/.default`;
@@ -60128,6 +60322,10 @@ Object.defineProperty(exports, 'delay', {
enumerable: true,
get: function () { return coreUtil.delay; }
});
Object.defineProperty(exports, 'isNode', {
enumerable: true,
get: function () { return coreUtil.isNode; }
});
Object.defineProperty(exports, 'isTokenCredential', {
enumerable: true,
get: function () { return coreAuth.isTokenCredential; }
@@ -60167,7 +60365,6 @@ exports.generateUuid = generateUuid;
exports.getDefaultProxySettings = getDefaultProxySettings;
exports.getDefaultUserAgentValue = getDefaultUserAgentValue;
exports.isDuration = isDuration;
exports.isNode = isNode;
exports.isValidUuid = isValidUuid;
exports.keepAlivePolicy = keepAlivePolicy;
exports.logPolicy = logPolicy;
33 examples.md
@@ -39,6 +39,7 @@
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
- [Swift - Swift Package Manager](#swift---swift-package-manager)
- [Swift - Mint](#swift---mint)
- [* - Bazel](#---bazel)

## C# - NuGet

@@ -657,3 +658,35 @@ steps:
    restore-keys: |
      ${{ runner.os }}-mint-
```

## * - Bazel

[`bazelisk`](https://github.com/bazelbuild/bazelisk) does not have to be separately downloaded and installed because it's already included in GitHub's `ubuntu-latest` and `macos-latest` base images.

### Linux

```yaml
- name: Cache Bazel
  uses: actions/cache@v3
  with:
    path: |
      ~/.cache/bazel
    key: ${{ runner.os }}-bazel-${{ hashFiles('.bazelversion', '.bazelrc', 'WORKSPACE', 'WORKSPACE.bazel', 'MODULE.bazel') }}
    restore-keys: |
      ${{ runner.os }}-bazel-
- run: bazelisk test //...
```

### macOS

```yaml
- name: Cache Bazel
  uses: actions/cache@v3
  with:
    path: |
      /private/var/tmp/_bazel_runner/
    key: ${{ runner.os }}-bazel-${{ hashFiles('.bazelversion', '.bazelrc', 'WORKSPACE', 'WORKSPACE.bazel', 'MODULE.bazel') }}
    restore-keys: |
      ${{ runner.os }}-bazel-
- run: bazelisk test //...
```
1170 package-lock.json generated
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "3.3.1",
"version": "3.3.3",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.2.1",
"@actions/cache": "^3.2.3",
"@actions/core": "^1.10.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"
@@ -1,10 +1,3 @@
import restoreImpl from "./restoreImpl";
import { StateProvider } from "./stateProvider";
import { restoreRun } from "./restoreImpl";

async function run(): Promise<void> {
await restoreImpl(new StateProvider());
}

run();

export default run;
restoreRun(true);
@@ -2,10 +2,14 @@ import * as cache from "@actions/cache";
import * as core from "@actions/core";

import { Events, Inputs, Outputs, State } from "./constants";
import { IStateProvider } from "./stateProvider";
import {
IStateProvider,
NullStateProvider,
StateProvider
} from "./stateProvider";
import * as utils from "./utils/actionUtils";

async function restoreImpl(
export async function restoreImpl(
stateProvider: IStateProvider
): Promise<string | undefined> {
try {
@@ -82,4 +86,37 @@ async function restoreImpl(
}
}

export default restoreImpl;
async function run(
stateProvider: IStateProvider,
earlyExit: boolean | undefined
): Promise<void> {
try {
await restoreImpl(stateProvider);
} catch (err) {
console.error(err);
if (earlyExit) {
process.exit(1);
}
}

// node will stay alive if any promises are not resolved,
// which is a possibility if HTTP requests are dangling
// due to retries or timeouts. We know that if we got here
// that all promises that we care about have successfully
// resolved, so simply exit with success.
if (earlyExit) {
process.exit(0);
}
}

export async function restoreOnlyRun(
earlyExit?: boolean | undefined
): Promise<void> {
await run(new NullStateProvider(), earlyExit);
}

export async function restoreRun(
earlyExit?: boolean | undefined
): Promise<void> {
await run(new StateProvider(), earlyExit);
}
@@ -1,10 +1,3 @@
import restoreImpl from "./restoreImpl";
import { NullStateProvider } from "./stateProvider";
import { restoreOnlyRun } from "./restoreImpl";

async function run(): Promise<void> {
await restoreImpl(new NullStateProvider());
}

run();

export default run;
restoreOnlyRun(true);