Compare commits

...

11 Commits

15 changed files with 336 additions and 113 deletions

View File

@ -21,7 +21,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ['3.7', '3.8', '3.9', 'pypy-3.7-v7.3.5', 'pypy-3.7-v7.x']
python-version: ['3.8', '3.9', 'pypy-3.7-v7.x']
steps:
- uses: actions/checkout@v2
- name: Setup Python
@ -39,18 +39,18 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ['3.7', '3.8', '3.9', 'pypy-3.7-v7.3.5', 'pypy-3.7-v7.x']
python-version: ['3.8', '3.9', 'pypy-3.7-v7.x']
steps:
- uses: actions/checkout@v2
- name: Install pipenv
run: pipx install pipenv
- name: Setup Python
uses: ./
with:
python-version: ${{ matrix.python-version }}
cache: 'pipenv'
- name: Install pipenv
run: pipx install pipenv
- name: Install dependencies
run: pipenv install flake8
run: pipenv install numpy
python-pip-dependencies-caching-path:
name: Test pip (Python ${{ matrix.python-version}}, ${{ matrix.os }})
@ -59,7 +59,7 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ['3.7', '3.8', '3.9', 'pypy-3.7-v7.3.5', 'pypy-3.7-v7.x']
python-version: ['3.8', '3.9', 'pypy-3.7-v7.x']
steps:
- uses: actions/checkout@v2
- name: Setup Python
@ -78,16 +78,16 @@ jobs:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python-version: ['3.7', '3.8', '3.9', 'pypy-3.7-v7.3.5', 'pypy-3.7-v7.x']
python-version: ['3.8', '3.9', 'pypy-3.7-v7.x']
steps:
- uses: actions/checkout@v2
- name: Install pipenv
run: pipx install pipenv
- name: Setup Python
uses: ./
with:
python-version: ${{ matrix.python-version }}
cache: 'pipenv'
cache-dependency-path: '**/requirements-linux.txt'
- name: Install pipenv
run: pipx install pipenv
- name: Install dependencies
run: pipenv install flake8
run: pipenv install numpy

View File

@ -14,11 +14,15 @@ jobs:
name: Check licenses
steps:
- uses: actions/checkout@v2
- name: Set Node.js 12.x
uses: actions/setup-node@v2
with:
node-version: 12.x
- run: npm ci
- name: Install licensed
run: |
cd $RUNNER_TEMP
curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/2.12.2/licensed-2.12.2-linux-x64.tar.gz
curl -Lfs -o licensed.tar.gz https://github.com/github/licensed/releases/download/3.3.1/licensed-3.3.1-linux-x64.tar.gz
sudo tar -xzf licensed.tar.gz
sudo mv licensed /usr/local/bin/licensed
- run: licensed status

View File

@ -97,7 +97,7 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [macos-10.15, windows-latest, ubuntu-18.04, ubuntu-20.04]
os: [macos-10.15, windows-2019, ubuntu-18.04, ubuntu-20.04]
steps:
- name: Checkout
uses: actions/checkout@v2

View File

@ -1,6 +1,6 @@
---
name: "@actions/cache"
version: 1.0.7
version: 1.0.8
type: npm
summary: Actions cache lib
homepage: https://github.com/actions/toolkit/tree/main/packages/cache

View File

@ -1,6 +1,6 @@
---
name: "@azure/core-http"
version: 2.2.1
version: 2.2.2
type: npm
summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
libraries generated using AutoRest

View File

@ -1,6 +1,6 @@
---
name: node-fetch
version: 2.6.5
version: 2.6.7
type: npm
summary: A light-weight module that brings window.fetch to node.js
homepage: https://github.com/bitinn/node-fetch

View File

@ -43,7 +43,7 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [ '2.x', '3.x', 'pypy-2.7', 'pypy-3.6', 'pypy-3.7' ]
python-version: [ '2.x', '3.x', 'pypy-2.7', 'pypy-3.7', 'pypy-3.8' ]
name: Python ${{ matrix.python-version }} sample
steps:
- uses: actions/checkout@v2
@ -63,7 +63,7 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: ['2.7', '3.6', '3.7', '3.8', 'pypy-2.7', 'pypy-3.6']
python-version: ['2.7', '3.7', '3.8', '3.9', '3.10', 'pypy-2.7', 'pypy-3.8']
exclude:
- os: macos-latest
python-version: '3.8'
@ -76,7 +76,7 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
- name: Display Python version
run: python -c "import sys; print(sys.version)"
run: python --version
```
Download and set up a version of Python that does not come preinstalled on an image:
@ -87,7 +87,7 @@ jobs:
strategy:
matrix:
# in this example, there is a newer version already installed, 3.7.7, so the older version will be downloaded
python-version: ['3.5', '3.6', '3.7.4', '3.8']
python-version: ['3.7.4', '3.8', '3.9', '3.10']
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
@ -102,7 +102,7 @@ steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9.0-beta.4'
python-version: '3.11.0-alpha.1'
- run: python my_script.py
```
@ -112,7 +112,7 @@ steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: '3.9.0-alpha - 3.9.0' # SemVer's version range syntax
python-version: '3.11.0-alpha - 3.11.0' # SemVer's version range syntax
- run: python my_script.py
```
@ -125,9 +125,9 @@ jobs:
strategy:
matrix:
python-version:
- 'pypy-3.6' # the latest available version of PyPy that supports Python 3.6
- 'pypy-3.7' # the latest available version of PyPy that supports Python 3.7
- 'pypy-3.7-v7.3.3' # Python 3.7 and PyPy 7.3.3
- 'pypy-3.8' # the latest available version of PyPy that supports Python 3.8
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
@ -146,7 +146,7 @@ Check out our detailed guide on using [Python with GitHub Actions](https://help.
`setup-python` is able to configure Python from two sources:
- Preinstalled versions of Python in the tools cache on GitHub-hosted runners.
- For detailed information regarding the available versions of Python that are installed see [Supported software](https://docs.github.com/en/actions/reference/specifications-for-github-hosted-runners#supported-software).
- For detailed information regarding the available versions of Python that are installed, see [Supported software](https://docs.github.com/en/actions/reference/specifications-for-github-hosted-runners#supported-software).
- For every minor version of Python, expect only the latest patch to be preinstalled.
- If `3.8.1` is installed for example, and `3.8.2` is released, expect `3.8.1` to be removed and replaced by `3.8.2` in the tools cache.
- If the exact patch version doesn't matter to you, specifying just the major and minor version will get you the latest preinstalled patch version. In the previous example, the version spec `3.8` will use the `3.8.2` Python version found in the cache.
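For instance, a minimal sketch of relying on that behavior (which exact 3.8.x patch `'3.8'` resolves to depends on the runner image at the time of the run):
```
steps:
  - uses: actions/checkout@v2
  - uses: actions/setup-python@v2
    with:
      # '3.8' picks the latest preinstalled 3.8.x patch from the tools cache
      python-version: '3.8'
  - run: python --version
```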
@ -159,9 +159,9 @@ Check out our detailed guide on using [Python with GitHub Actions](https://help.
`setup-python` is able to configure PyPy from two sources:
- Preinstalled versions of PyPy in the tools cache on GitHub-hosted runners
- For detailed information regarding the available versions of PyPy that are installed see [Supported software](https://docs.github.com/en/actions/reference/specifications-for-github-hosted-runners#supported-software).
- For detailed information regarding the available versions of PyPy that are installed, see [Supported software](https://docs.github.com/en/actions/reference/specifications-for-github-hosted-runners#supported-software).
- For the latest PyPy release, all versions of Python are cached.
- Cache is updated with a 1-2 week delay. If you specify the PyPy version as `pypy-3.6`, the cached version will be used although a newer version is available. If you need to start using the recently released version right after release, you should specify the exact PyPy version using `pypy-3.6-v7.3.3`.
- Cache is updated with a 1-2 week delay. If you specify the PyPy version as `pypy-3.7`, the cached version will be used although a newer version is available. If you need to start using the recently released version right after release, you should specify the exact PyPy version using `pypy-3.7-v7.3.3`.
- Downloadable PyPy versions from the [official PyPy site](https://downloads.python.org/pypy/).
- All available versions that we can download are listed in [versions.json](https://downloads.python.org/pypy/versions.json) file.
@ -198,8 +198,8 @@ The version of PyPy should be specified in the format `pypy-<python_version>[-v<
The `<pypy_version>` parameter is optional and can be skipped. The latest version will be used in this case.
```
pypy-3.6 # the latest available version of PyPy that supports Python 3.6
pypy-3.7 # the latest available version of PyPy that supports Python 3.7
pypy-3.8 # the latest available version of PyPy that supports Python 3.8
pypy-2.7 # the latest available version of PyPy that supports Python 2.7
pypy-3.7-v7.3.3 # Python 3.7 and PyPy 7.3.3
pypy-3.7-v7.x # Python 3.7 and the latest available PyPy 7.x
@ -230,7 +230,6 @@ steps:
python-version: '3.9'
cache: 'pip'
- run: pip install -r requirements.txt
- run: pip test
```
**Caching pipenv dependencies:**
@ -244,7 +243,6 @@ steps:
python-version: '3.9'
cache: 'pipenv'
- run: pipenv install
- run: pipenv test
```
**Using wildcard patterns to cache dependencies**
@ -257,7 +255,6 @@ steps:
cache: 'pip'
cache-dependency-path: '**/requirements-dev.txt'
- run: pip install -r subdirectory/requirements-dev.txt
- run: pip test
```
**Using a list of file paths to cache dependencies**
@ -274,7 +271,6 @@ steps:
server/app/Pipfile.lock
__test__/app/Pipfile.lock
- run: pipenv install
- run: pipenv test
```
# Using `setup-python` with a self hosted runner
@ -304,7 +300,7 @@ If you are experiencing problems while configuring Python on your self-hosted ru
- The user starting the runner is in the owning group, and the owning group has write permission.
- All users have write permission.
- One quick way to grant access is to change the user and group of `/opt/hostedtoolcache` to be the same as the runners using `chown`.
- `sudo chown runner-user:runner-group opt/hostedtoolcache/`.
- `sudo chown runner-user:runner-group /opt/hostedtoolcache/`.
- If your runner is configured as a service and you run into problems, make sure the user that the service is running as is correct. For more information, you can [check the status of your self-hosted runner](https://help.github.com/en/actions/hosting-your-own-runners/configuring-the-self-hosted-runner-application-as-a-service#checking-the-status-of-the-service).
### Mac

View File

@ -92,15 +92,12 @@ describe('restore-cache', () => {
dependencyFile
);
await cacheDistributor.restoreCache();
let pythonKey = '';
if (packageManager === 'pipenv') {
pythonKey = `python-${pythonVersion}-`;
}
expect(infoSpy).toHaveBeenCalledWith(
`Cache restored from key: setup-python-${process.env['RUNNER_OS']}-${pythonKey}${packageManager}-${fileHash}`
`Cache restored from key: setup-python-${process.env['RUNNER_OS']}-python-${pythonVersion}-${packageManager}-${fileHash}`
);
}
},
30000
);
it.each([

View File

@ -1041,10 +1041,10 @@ function createTempDirectory() {
});
}
exports.createTempDirectory = createTempDirectory;
function getArchiveFileSizeIsBytes(filePath) {
function getArchiveFileSizeInBytes(filePath) {
return fs.statSync(filePath).size;
}
exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var e_1, _a;
var _b;
@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
function uploadFile(httpClient, cacheId, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, 'r');
const uploadOptions = options_1.getUploadOptions(options);
@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
core.debug('Commiting cache');
const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
const contentLengthHeader = downloadResponse.message.headers['content-length'];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
if (actualLength !== expectedLength) {
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
}
@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
});
const INTERNALS$2 = Symbol('Request internals');
const URL = whatwgUrl.URL;
const URL = Url.URL || whatwgUrl.URL;
// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
@ -34585,9 +34585,17 @@ AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
const URL$1 = Url.URL || whatwgUrl.URL;
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
const orig = new URL$1(original).hostname;
const dest = new URL$1(destination).hostname;
return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
};
/**
* Fetch function
@ -34675,7 +34683,19 @@ function fetch(url, opts) {
const location = headers.get('Location');
// HTTP fetch step 5.3
const locationURL = location === null ? null : resolve_url(request.url, location);
let locationURL = null;
try {
locationURL = location === null ? null : new URL$1(location, request.url).toString();
} catch (err) {
// error here can only be invalid URL in Location: header
// do not throw when options.redirect == manual
// let the user extract the errorneous redirect URL
if (request.redirect !== 'manual') {
reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
finalize();
return;
}
}
// HTTP fetch step 5.5
switch (request.redirect) {
@ -34723,6 +34743,12 @@ function fetch(url, opts) {
size: request.size
};
if (!isDomainOrSubdomain(request.url, locationURL)) {
for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
requestOpts.headers.delete(name);
}
}
// HTTP-redirect fetch step 9
if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
@ -41451,7 +41477,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, compressionMethod);
core.info('Cache restored successfully');
@ -41496,18 +41522,29 @@ function saveCache(paths, key, options) {
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
try {
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
return cacheId;
});
}
@ -53218,7 +53255,12 @@ class HttpHeaders {
* Create a deep clone/copy of this HttpHeaders collection.
*/
clone() {
return new HttpHeaders(this.rawHeaders());
const resultPreservingCasing = {};
for (const headerKey in this._headersMap) {
const header = this._headersMap[headerKey];
resultPreservingCasing[header.name] = header.value;
}
return new HttpHeaders(resultPreservingCasing);
}
}
@ -53255,7 +53297,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "2.2.1",
coreHttpVersion: "2.2.2",
/**
* Specifies HTTP.
*/
@ -55568,7 +55610,7 @@ class FetchHttpClient {
}
let downloadStreamDone = Promise.resolve();
if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
}
Promise.all([uploadStreamDone, downloadStreamDone])
.then(() => {
@ -55586,11 +55628,14 @@ class FetchHttpClient {
function isReadableStream(body) {
return body && typeof body.pipe === "function";
}
function isStreamComplete(stream) {
function isStreamComplete(stream, aborter) {
return new Promise((resolve) => {
stream.on("close", resolve);
stream.on("end", resolve);
stream.on("error", resolve);
stream.once("close", () => {
aborter === null || aborter === void 0 ? void 0 : aborter.abort();
resolve();
});
stream.once("end", resolve);
stream.once("error", resolve);
});
}
function parseHeaders(headers) {

dist/setup/index.js (vendored, 129 changed lines)
View File

@ -1041,10 +1041,10 @@ function createTempDirectory() {
});
}
exports.createTempDirectory = createTempDirectory;
function getArchiveFileSizeIsBytes(filePath) {
function getArchiveFileSizeInBytes(filePath) {
return fs.statSync(filePath).size;
}
exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var e_1, _a;
var _b;
@ -4552,7 +4552,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
function uploadFile(httpClient, cacheId, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, 'r');
const uploadOptions = options_1.getUploadOptions(options);
@ -4602,7 +4602,7 @@ function saveCache(cacheId, archivePath, options) {
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
core.debug('Commiting cache');
const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@ -7551,7 +7551,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
const contentLengthHeader = downloadResponse.message.headers['content-length'];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
if (actualLength !== expectedLength) {
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
}
@ -34463,16 +34463,38 @@ Object.defineProperty(exports, "__esModule", { value: true });
const glob = __importStar(__webpack_require__(281));
const core = __importStar(__webpack_require__(470));
const exec = __importStar(__webpack_require__(986));
const child_process = __importStar(__webpack_require__(129));
const util_1 = __importDefault(__webpack_require__(669));
const path = __importStar(__webpack_require__(622));
const os_1 = __importDefault(__webpack_require__(87));
const cache_distributor_1 = __importDefault(__webpack_require__(435));
const utils_1 = __webpack_require__(163);
class PipCache extends cache_distributor_1.default {
constructor(cacheDependencyPath = '**/requirements.txt') {
constructor(pythonVersion, cacheDependencyPath = '**/requirements.txt') {
super('pip', cacheDependencyPath);
this.pythonVersion = pythonVersion;
}
getCacheGlobalDirectories() {
return __awaiter(this, void 0, void 0, function* () {
const { stdout, stderr, exitCode } = yield exec.getExecOutput('pip cache dir');
let exitCode = 1;
let stdout = '';
let stderr = '';
// Add temporary fix for Windows
// On windows it is necessary to execute through an exec
// because the getExecOutput gives a non zero code or writes to stderr for pip 22.0.2,
// or spawn must be started with the shell option enabled for getExecOutput
// Related issue: https://github.com/actions/setup-python/issues/328
if (utils_1.IS_WINDOWS) {
const execPromisify = util_1.default.promisify(child_process.exec);
({ stdout: stdout, stderr: stderr } = yield execPromisify('pip cache dir'));
}
else {
({
stdout: stdout,
stderr: stderr,
exitCode: exitCode
} = yield exec.getExecOutput('pip cache dir'));
}
if (exitCode && stderr) {
throw new Error(`Could not get cache folder path for pip package manager`);
}
@ -34487,8 +34509,8 @@ class PipCache extends cache_distributor_1.default {
computeKeys() {
return __awaiter(this, void 0, void 0, function* () {
const hash = yield glob.hashFiles(this.cacheDependencyPath);
const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-${this.packageManager}-${hash}`;
const restoreKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-${this.packageManager}`;
const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}-${hash}`;
const restoreKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}`;
return {
primaryKey,
restoreKey: [restoreKey]
@ -36873,7 +36895,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
});
const INTERNALS$2 = Symbol('Request internals');
const URL = whatwgUrl.URL;
const URL = Url.URL || whatwgUrl.URL;
// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
@ -37136,9 +37158,17 @@ AbortError.prototype = Object.create(Error.prototype);
AbortError.prototype.constructor = AbortError;
AbortError.prototype.name = 'AbortError';
const URL$1 = Url.URL || whatwgUrl.URL;
// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
const PassThrough$1 = Stream.PassThrough;
const resolve_url = Url.resolve;
const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
const orig = new URL$1(original).hostname;
const dest = new URL$1(destination).hostname;
return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
};
/**
* Fetch function
@ -37226,7 +37256,19 @@ function fetch(url, opts) {
const location = headers.get('Location');
// HTTP fetch step 5.3
const locationURL = location === null ? null : resolve_url(request.url, location);
let locationURL = null;
try {
locationURL = location === null ? null : new URL$1(location, request.url).toString();
} catch (err) {
// error here can only be invalid URL in Location: header
// do not throw when options.redirect == manual
// let the user extract the errorneous redirect URL
if (request.redirect !== 'manual') {
reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
finalize();
return;
}
}
// HTTP fetch step 5.5
switch (request.redirect) {
@ -37274,6 +37316,12 @@ function fetch(url, opts) {
size: request.size
};
if (!isDomainOrSubdomain(request.url, locationURL)) {
for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
requestOpts.headers.delete(name);
}
}
// HTTP-redirect fetch step 9
if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
@ -43888,7 +43936,7 @@ var PackageManagers;
function getCacheDistributor(packageManager, pythonVersion, cacheDependencyPath) {
switch (packageManager) {
case PackageManagers.Pip:
return new pip_cache_1.default(cacheDependencyPath);
return new pip_cache_1.default(pythonVersion, cacheDependencyPath);
case PackageManagers.Pipenv:
return new pipenv_cache_1.default(pythonVersion, cacheDependencyPath);
default:
@ -47198,7 +47246,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, compressionMethod);
core.info('Cache restored successfully');
@ -47243,18 +47291,29 @@ function saveCache(paths, key, options) {
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
try {
yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
finally {
// Try to delete the archive to save space
try {
yield utils.unlinkFile(archivePath);
}
catch (error) {
core.debug(`Failed to delete archive: ${error}`);
}
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield cacheHttpClient.saveCache(cacheId, archivePath, options);
return cacheId;
});
}
@ -59947,7 +60006,12 @@ class HttpHeaders {
* Create a deep clone/copy of this HttpHeaders collection.
*/
clone() {
return new HttpHeaders(this.rawHeaders());
const resultPreservingCasing = {};
for (const headerKey in this._headersMap) {
const header = this._headersMap[headerKey];
resultPreservingCasing[header.name] = header.value;
}
return new HttpHeaders(resultPreservingCasing);
}
}
@ -59984,7 +60048,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "2.2.1",
coreHttpVersion: "2.2.2",
/**
* Specifies HTTP.
*/
@ -62297,7 +62361,7 @@ class FetchHttpClient {
}
let downloadStreamDone = Promise.resolve();
if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
}
Promise.all([uploadStreamDone, downloadStreamDone])
.then(() => {
@ -62315,11 +62379,14 @@ class FetchHttpClient {
function isReadableStream(body) {
return body && typeof body.pipe === "function";
}
function isStreamComplete(stream) {
function isStreamComplete(stream, aborter) {
return new Promise((resolve) => {
stream.on("close", resolve);
stream.on("end", resolve);
stream.on("error", resolve);
stream.once("close", () => {
aborter === null || aborter === void 0 ? void 0 : aborter.abort();
resolve();
});
stream.once("end", resolve);
stream.once("error", resolve);
});
}
function parseHeaders(headers) {

View File

@ -0,0 +1,93 @@
## 0. Caching dependencies
Date: 2021-10-01
Status: Accepted
## Context
`actions/setup-python` is one of the most popular Python actions on GitHub Actions. Many customers use it in conjunction with `actions/cache` to speed up dependency installation.
See the [actions/cache documentation](https://github.com/actions/cache/blob/main/examples.md#python---pip) for more examples of proper usage.
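As a rough sketch, the manual pattern being simplified looks something like the following (the cache path and key are illustrative, based on the pip example for Ubuntu runners):
```
steps:
  - uses: actions/checkout@v2
  - uses: actions/setup-python@v2
    with:
      python-version: 3.9
  - uses: actions/cache@v2
    with:
      # default pip cache location on Ubuntu runners; other OSes use different paths
      path: ~/.cache/pip
      key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
      restore-keys: |
        ${{ runner.os }}-pip-
  - run: pip install -r requirements.txt
```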
## Goals & Anti-Goals
Integrating caching functionality into the `actions/setup-python` action will bring the following benefits for action users:
- Lower the entry threshold for caching Python dependencies and simplify the initial configuration
- Simplify YAML pipelines by reducing the need for additional steps to enable caching
- More users will use caching for Python, so they will have faster builds!
We will add support for Pip and Pipenv dependency caching.
We won't pursue the goal of providing wide customization of caching within the scope of the `actions/setup-python` action. The purpose of this integration is to cover ~90% of basic use cases. If users need flexible customization, we will advise them to use `actions/cache` directly.
## Decision
- Add a `cache` input parameter to `actions/setup-python`. For now the input will accept the following values:
- pip - enable caching for pip dependencies
- pipenv - enable caching for pipenv dependencies
- '' - disable caching (default value)
- The cache feature will be disabled by default to make sure that we don't break existing customers.
- The action will search for dependency files (requirements.txt for pip and Pipfile.lock for pipenv) in the repository root (or relative to the repository root, if patterns are used) and throw an error if none is found.
- The hash of the found file will be used as part of the cache key (the same approach that actions/cache recommends).
- The following cache key will be used for pip: `setup-python-${{ runner.os }}-pip-${{ hashFiles('<package-file-path>') }}`
- The following cache key will be used for pipenv: `setup-python-${{ runner.os }}-python-${{ python-version }}-pipenv-${{ hashFiles('<package-file-path>') }}`. We add the Python version to the cache key because the created virtualenv folder with the project name contains a copy of the Python binary as a symlink to paths like `/opt/hostedtoolcache/Python/3.7.11`, so the cache could otherwise be fetched with the wrong Python version. See details in the related [pull request](https://github.com/actions/cache/pull/607) in actions/cache. Illustrative keys are sketched after this list.
- The action will save the package managers' global cache:
- Pip (retrieved via `pip cache dir`). The command is available with pip 20.1 or later. We always update pip during installation, which is why this command should be available.
- Pipenv (default cache paths):
- ~/.local/share/virtualenvs (macOS)
- ~/.virtualenvs (Windows)
- ~/.local/share/virtualenvs (Ubuntu)
- Add a `cache-dependency-path` input parameter to `actions/setup-python`. The new input will accept one or more paths (relative to the repository root) to dependency files; wildcard patterns are supported. If a provided path contains wildcards, the action will search all matching files and calculate a common hash, like the `${{ hashFiles('**/requirements-dev.txt') }}` YAML construction does.
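Illustrative keys produced by these rules on an Ubuntu runner (hashes are shortened placeholders; the real values come from `hashFiles` over the matched files):
```
# pip
setup-python-Linux-pip-4f2d3e...                 # hash of '**/requirements.txt'
# pipenv with Python 3.9 (python version embedded in the key)
setup-python-Linux-python-3.9-pipenv-9a81c0...   # hash of '**/Pipfile.lock'
```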
## Examples of real use cases
- Pip package manager
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
cache: pip
```
- Pipenv package manager
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
cache: pipenv
```
- With `cache-dependency-path`
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
cache: pip
cache-dependency-path: |
**/requirements-dev.txt
**/requirements-test.txt
**/requirements.txt
```
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.9
cache: pip
cache-dependency-path: '**/requirements-dev.txt'
```
## Release process
As soon as the functionality is implemented, we will release a minor update of the action. There is no need to bump the major version since there are no breaking changes for existing users. After that, we will update [starter-workflows](https://github.com/actions/starter-workflows/blob/main/ci/python-app.yml) and the [GitHub Actions documentation](https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#caching-dependencies).

package-lock.json (generated, 18 changed lines)
View File

@ -5,9 +5,9 @@
"requires": true,
"dependencies": {
"@actions/cache": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.7.tgz",
"integrity": "sha512-MY69kxuubqUFq84pFlu8m6Poxl5sR/xyhpC4JEvno7Yg9ASYdGizEmKgt0m8ovewpYKf15UAOcSC0hzS+DuosA==",
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.8.tgz",
"integrity": "sha512-GWNNB67w93HGJRQXlsV56YqrdAuDoP3esK/mo5mzU8WoDCVjtQgJGsTdkYUX7brswtT7xnI30bWNo1WLKQ8FZQ==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@ -136,9 +136,9 @@
}
},
"@azure/core-http": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.1.tgz",
"integrity": "sha512-7ATnV3OGzCO2K9kMrh3NKUM8b4v+xasmlUhkNZz6uMbm+8XH/AexLkhRGsoo0GyKNlEGvyGEfytqTk0nUY2I4A==",
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.2.tgz",
"integrity": "sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==",
"requires": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-asynciterator-polyfill": "^1.0.0",
@ -8137,9 +8137,9 @@
"dev": true
},
"node-fetch": {
"version": "2.6.5",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.5.tgz",
"integrity": "sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ==",
"version": "2.6.7",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
"requires": {
"whatwg-url": "^5.0.0"
},

View File

@ -5,7 +5,7 @@
"description": "Setup python action",
"main": "dist/index.js",
"scripts": {
"build": "tsc",
"build": "ncc build -o dist/setup src/setup-python.ts && ncc build -o dist/cache-save src/cache-save.ts",
"format": "prettier --write \"{,!(node_modules)/**/}*.ts\"",
"format-check": "prettier --check \"{,!(node_modules)/**/}*.ts\"",
"release": "ncc build -o dist/setup src/setup-python.ts && ncc build -o dist/cache-save src/cache-save.ts && git add -f dist/",
@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^1.0.7",
"@actions/cache": "^1.0.8",
"@actions/core": "^1.2.3",
"@actions/exec": "^1.1.0",
"@actions/glob": "^0.2.0",

View File

@ -13,7 +13,7 @@ export function getCacheDistributor(
) {
switch (packageManager) {
case PackageManagers.Pip:
return new PipCache(cacheDependencyPath);
return new PipCache(pythonVersion, cacheDependencyPath);
case PackageManagers.Pipenv:
return new PipenvCache(pythonVersion, cacheDependencyPath);
default:

View File

@ -1,21 +1,42 @@
import * as glob from '@actions/glob';
import * as core from '@actions/core';
import * as exec from '@actions/exec';
import * as child_process from 'child_process';
import utils from 'util';
import * as path from 'path';
import os from 'os';
import CacheDistributor from './cache-distributor';
import {IS_WINDOWS} from '../utils';
class PipCache extends CacheDistributor {
constructor(cacheDependencyPath: string = '**/requirements.txt') {
constructor(
private pythonVersion: string,
cacheDependencyPath: string = '**/requirements.txt'
) {
super('pip', cacheDependencyPath);
}
protected async getCacheGlobalDirectories() {
const {stdout, stderr, exitCode} = await exec.getExecOutput(
'pip cache dir'
);
let exitCode = 1;
let stdout = '';
let stderr = '';
// Add temporary fix for Windows
// On windows it is necessary to execute through an exec
// because the getExecOutput gives a non zero code or writes to stderr for pip 22.0.2,
// or spawn must be started with the shell option enabled for getExecOutput
// Related issue: https://github.com/actions/setup-python/issues/328
if (IS_WINDOWS) {
const execPromisify = utils.promisify(child_process.exec);
({stdout: stdout, stderr: stderr} = await execPromisify('pip cache dir'));
} else {
({
stdout: stdout,
stderr: stderr,
exitCode: exitCode
} = await exec.getExecOutput('pip cache dir'));
}
if (exitCode && stderr) {
throw new Error(
@ -36,8 +57,8 @@ class PipCache extends CacheDistributor {
protected async computeKeys() {
const hash = await glob.hashFiles(this.cacheDependencyPath);
const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-${this.packageManager}-${hash}`;
const restoreKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-${this.packageManager}`;
const primaryKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}-${hash}`;
const restoreKey = `${this.CACHE_KEY_PREFIX}-${process.env['RUNNER_OS']}-python-${this.pythonVersion}-${this.packageManager}`;
return {
primaryKey,