Compare commits

...

2 Commits

8 changed files with 192 additions and 61 deletions

View File

@@ -1,6 +1,6 @@
---
name: "@actions/cache"
-version: 1.0.7
+version: 1.0.8
type: npm
summary: Actions cache lib
homepage: https://github.com/actions/toolkit/tree/main/packages/cache

View File

@@ -1,6 +1,6 @@
---
name: "@azure/core-http"
-version: 2.2.1
+version: 2.2.2
type: npm
summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
libraries generated using AutoRest

View File

@@ -1,6 +1,6 @@
---
name: node-fetch
-version: 2.6.5
+version: 2.6.6
type: npm
summary: A light-weight module that brings window.fetch to node.js
homepage: https://github.com/bitinn/node-fetch

View File

@@ -1041,10 +1041,10 @@ function createTempDirectory() {
});
}
exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
return fs.statSync(filePath).size;
}
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var e_1, _a;
var _b;
@@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
function uploadFile(httpClient, cacheId, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
-const fileSize = fs.statSync(archivePath).size;
+const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, 'r');
const uploadOptions = options_1.getUploadOptions(options);
@@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
core.debug('Commiting cache');
-const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
const contentLengthHeader = downloadResponse.message.headers['content-length'];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
-const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
if (actualLength !== expectedLength) {
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
}
@@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
});
const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
@@ -41451,7 +41451,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
-const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, compressionMethod);
core.info('Cache restored successfully');
@@ -41496,18 +41496,29 @@ function saveCache(paths, key, options) {
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
-yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-if (core.isDebug()) {
-    yield tar_1.listTar(archivePath, compressionMethod);
+try {
+    yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+    if (core.isDebug()) {
+        yield tar_1.listTar(archivePath, compressionMethod);
+    }
+    const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+    const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+    core.debug(`File Size: ${archiveFileSize}`);
+    if (archiveFileSize > fileSizeLimit) {
+        throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+    }
+    core.debug(`Saving Cache (ID: ${cacheId})`);
+    yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
-const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
-core.debug(`File Size: ${archiveFileSize}`);
-if (archiveFileSize > fileSizeLimit) {
-    throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+finally {
+    // Try to delete the archive to save space
+    try {
+        yield utils.unlinkFile(archivePath);
+    }
+    catch (error) {
+        core.debug(`Failed to delete archive: ${error}`);
+    }
}
-core.debug(`Saving Cache (ID: ${cacheId})`);
-yield cacheHttpClient.saveCache(cacheId, archivePath, options);
return cacheId;
});
}
@@ -53218,7 +53229,12 @@ class HttpHeaders {
* Create a deep clone/copy of this HttpHeaders collection.
*/
clone() {
-return new HttpHeaders(this.rawHeaders());
+const resultPreservingCasing = {};
+for (const headerKey in this._headersMap) {
+    const header = this._headersMap[headerKey];
+    resultPreservingCasing[header.name] = header.value;
+}
+return new HttpHeaders(resultPreservingCasing);
}
}
@@ -53255,7 +53271,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "2.2.1",
coreHttpVersion: "2.2.2",
/**
* Specifies HTTP.
*/
@@ -55568,7 +55584,7 @@ class FetchHttpClient {
}
let downloadStreamDone = Promise.resolve();
if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
}
Promise.all([uploadStreamDone, downloadStreamDone])
.then(() => {
@@ -55586,11 +55602,14 @@ class FetchHttpClient {
function isReadableStream(body) {
return body && typeof body.pipe === "function";
}
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
return new Promise((resolve) => {
stream.on("close", resolve);
stream.on("end", resolve);
stream.on("error", resolve);
stream.once("close", () => {
aborter === null || aborter === void 0 ? void 0 : aborter.abort();
resolve();
});
stream.once("end", resolve);
stream.once("error", resolve);
});
}
function parseHeaders(headers) {

dist/setup/index.js vendored
View File

@@ -1041,10 +1041,10 @@ function createTempDirectory() {
});
}
exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
return fs.statSync(filePath).size;
}
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
function resolvePaths(patterns) {
var e_1, _a;
var _b;
@@ -4552,7 +4552,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
function uploadFile(httpClient, cacheId, archivePath, options) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
-const fileSize = fs.statSync(archivePath).size;
+const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
const fd = fs.openSync(archivePath, 'r');
const uploadOptions = options_1.getUploadOptions(options);
@@ -4602,7 +4602,7 @@ function saveCache(cacheId, archivePath, options) {
yield uploadFile(httpClient, cacheId, archivePath, options);
// Commit Cache
core.debug('Commiting cache');
-const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -7551,7 +7551,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
const contentLengthHeader = downloadResponse.message.headers['content-length'];
if (contentLengthHeader) {
const expectedLength = parseInt(contentLengthHeader);
-const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
if (actualLength !== expectedLength) {
throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
}
@@ -36873,7 +36873,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
});
const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
// fix an issue where "format", "parse" aren't a named export for node <10
const parse_url = Url.parse;
@@ -47198,7 +47198,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
if (core.isDebug()) {
yield tar_1.listTar(archivePath, compressionMethod);
}
-const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, compressionMethod);
core.info('Cache restored successfully');
@@ -47243,18 +47243,29 @@ function saveCache(paths, key, options) {
const archiveFolder = yield utils.createTempDirectory();
const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
core.debug(`Archive Path: ${archivePath}`);
-yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-if (core.isDebug()) {
-    yield tar_1.listTar(archivePath, compressionMethod);
+try {
+    yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+    if (core.isDebug()) {
+        yield tar_1.listTar(archivePath, compressionMethod);
+    }
+    const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+    const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+    core.debug(`File Size: ${archiveFileSize}`);
+    if (archiveFileSize > fileSizeLimit) {
+        throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+    }
+    core.debug(`Saving Cache (ID: ${cacheId})`);
+    yield cacheHttpClient.saveCache(cacheId, archivePath, options);
}
-const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
-core.debug(`File Size: ${archiveFileSize}`);
-if (archiveFileSize > fileSizeLimit) {
-    throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+finally {
+    // Try to delete the archive to save space
+    try {
+        yield utils.unlinkFile(archivePath);
+    }
+    catch (error) {
+        core.debug(`Failed to delete archive: ${error}`);
+    }
}
-core.debug(`Saving Cache (ID: ${cacheId})`);
-yield cacheHttpClient.saveCache(cacheId, archivePath, options);
return cacheId;
});
}
@@ -59947,7 +59958,12 @@ class HttpHeaders {
* Create a deep clone/copy of this HttpHeaders collection.
*/
clone() {
-return new HttpHeaders(this.rawHeaders());
+const resultPreservingCasing = {};
+for (const headerKey in this._headersMap) {
+    const header = this._headersMap[headerKey];
+    resultPreservingCasing[header.name] = header.value;
+}
+return new HttpHeaders(resultPreservingCasing);
}
}
@@ -59984,7 +60000,7 @@ const Constants = {
/**
* The core-http version
*/
coreHttpVersion: "2.2.1",
coreHttpVersion: "2.2.2",
/**
* Specifies HTTP.
*/
@@ -62297,7 +62313,7 @@ class FetchHttpClient {
}
let downloadStreamDone = Promise.resolve();
if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
}
Promise.all([uploadStreamDone, downloadStreamDone])
.then(() => {
@@ -62315,11 +62331,14 @@ class FetchHttpClient {
function isReadableStream(body) {
return body && typeof body.pipe === "function";
}
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
return new Promise((resolve) => {
stream.on("close", resolve);
stream.on("end", resolve);
stream.on("error", resolve);
stream.once("close", () => {
aborter === null || aborter === void 0 ? void 0 : aborter.abort();
resolve();
});
stream.once("end", resolve);
stream.once("error", resolve);
});
}
function parseHeaders(headers) {

View File

@@ -0,0 +1,93 @@
## 0. Caching dependencies
Date: 2021-10-01
Status: Accepted
## Context
`actions/setup-python` is one of the most popular Python actions on GitHub Actions. Many customers use it in conjunction with `actions/cache` to speed up dependency installation.
See the [actions/cache documentation](https://github.com/actions/cache/blob/main/examples.md#python---pip) for more examples of proper usage.
## Goals & Anti-Goals
Integration of caching functionality into the `actions/setup-python` action will bring the following benefits for action users:
- Lower the barrier to entry for caching Python dependencies and simplify the initial configuration
- Simplify YAML pipelines by removing the need for additional steps to enable caching
- Bring caching to more Python users, which means faster builds!
We will add support for Pip and Pipenv dependency caching.
We won't pursue the goal of providing extensive customization of caching within the `actions/setup-python` action. The purpose of this integration is to cover ~90% of basic use-cases. If users need flexible customization, we will advise them to use `actions/cache` directly.
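For comparison, caching pip dependencies without this integration requires wiring up `actions/cache` by hand, roughly as in the following sketch (adapted from the pip example in the actions/cache documentation linked above; the cache path shown is for Ubuntu and differs on macOS and Windows):
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
- uses: actions/cache@v2
  with:
    path: ~/.cache/pip
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
    restore-keys: |
      ${{ runner.os }}-pip-
```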
## Decision
- Add a `cache` input parameter to `actions/setup-python`. For now, the input will accept the following values:
  - `pip` - enable caching for pip dependencies
  - `pipenv` - enable caching for pipenv dependencies
  - `''` - disable caching (default value)
- The cache feature will be disabled by default to make sure we don't break existing customers.
- The action will search for dependency files (`requirements.txt` for pip and `Pipfile.lock` for pipenv) in the repository root (or relative to the repository root, if patterns are used) and throw an error if none is found.
- The hash of the found file will be used as part of the cache key (the same approach that actions/cache recommends).
- The following cache key will be used for pip: `setup-python-${{ runner.os }}-pip-${{ hashFiles('<package-file-path>') }}`
- The following cache key will be used for pipenv: `setup-python-${{ runner.os }}-python-${{ python-version }}-pipenv-${{ hashFiles('<package-file-path>') }}`. We add the Python version to the cache key because the created virtualenv folder with the project name contains a copy of the Python binary as a symlink to a path like `/opt/hostedtoolcache/Python/3.7.11`, so a cache entry could otherwise be restored with the wrong Python version. See details in the related [pull request](https://github.com/actions/cache/pull/607) in actions/cache.
- The action will save each package manager's global cache:
  - Pip (retrieved via `pip cache dir`; see the snippet after this list). The command is available with pip 20.1 or later, and since we always update pip during installation, it should be available.
  - Pipenv (default cache paths):
    - ~/.local/share/virtualenvs (macOS)
    - ~/.virtualenvs (Windows)
    - ~/.local/share/virtualenvs (Ubuntu)
- Add a `cache-dependency-path` input parameter to `actions/setup-python`. The new input will accept a list of dependency files or wildcard patterns, as paths relative to the repository root. If a provided path contains wildcards, the action will search for all matching files and calculate a common hash, like the `${{ hashFiles('**/requirements-dev.txt') }}` YAML construct does.
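The snippet below shows how one might verify pip's global cache location on a runner; this is a hypothetical debugging step for illustration, not part of the action:
```
steps:
- run: python -m pip install --upgrade pip  # `pip cache dir` requires pip 20.1+
- run: pip cache dir  # prints e.g. ~/.cache/pip on Ubuntu
```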
## Examples of real use-cases
- Pip package manager
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pip
```
- Pipenv package manager
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pipenv
```
- With `cache-dependency-path`
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pip
    cache-dependency-path: |
      **/requirements-dev.txt
      **/requirements-test.txt
      **/requirements.txt
```
```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pip
    cache-dependency-path: '**/requirements-dev.txt'
```
## Release process
As soon as the functionality is implemented, we will release a minor update of the action. There is no need to bump the major version since there are no breaking changes for existing users. After that, we will update [starter-workflows](https://github.com/actions/starter-workflows/blob/main/ci/python-app.yml) and the [GitHub Actions documentation](https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#caching-dependencies).

package-lock.json generated
View File

@@ -5,9 +5,9 @@
"requires": true,
"dependencies": {
"@actions/cache": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.7.tgz",
"integrity": "sha512-MY69kxuubqUFq84pFlu8m6Poxl5sR/xyhpC4JEvno7Yg9ASYdGizEmKgt0m8ovewpYKf15UAOcSC0hzS+DuosA==",
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.8.tgz",
"integrity": "sha512-GWNNB67w93HGJRQXlsV56YqrdAuDoP3esK/mo5mzU8WoDCVjtQgJGsTdkYUX7brswtT7xnI30bWNo1WLKQ8FZQ==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@@ -136,9 +136,9 @@
}
},
"@azure/core-http": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.1.tgz",
"integrity": "sha512-7ATnV3OGzCO2K9kMrh3NKUM8b4v+xasmlUhkNZz6uMbm+8XH/AexLkhRGsoo0GyKNlEGvyGEfytqTk0nUY2I4A==",
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.2.tgz",
"integrity": "sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==",
"requires": {
"@azure/abort-controller": "^1.0.0",
"@azure/core-asynciterator-polyfill": "^1.0.0",
@@ -8137,9 +8137,9 @@
"dev": true
},
"node-fetch": {
"version": "2.6.5",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.5.tgz",
"integrity": "sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ==",
"version": "2.6.6",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.6.tgz",
"integrity": "sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA==",
"requires": {
"whatwg-url": "^5.0.0"
},

View File

@@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^1.0.7",
"@actions/cache": "^1.0.8",
"@actions/core": "^1.2.3",
"@actions/exec": "^1.1.0",
"@actions/glob": "^0.2.0",