Mirror of https://github.com/actions/setup-python.git (synced 2025-06-25 13:11:08 +02:00)
Compare commits
2 Commits
| Author | SHA1 | Date |
|---|---|---|
| | f382193329 | |
| | 3ef38b826b | |
.licenses/npm/@actions/cache.dep.yml (generated, 2 changed lines)

@@ -1,6 +1,6 @@
 ---
 name: "@actions/cache"
-version: 1.0.7
+version: 1.0.8
 type: npm
 summary: Actions cache lib
 homepage: https://github.com/actions/toolkit/tree/main/packages/cache

.licenses/npm/@azure/core-http.dep.yml (generated, 2 changed lines)

@@ -1,6 +1,6 @@
 ---
 name: "@azure/core-http"
-version: 2.2.1
+version: 2.2.2
 type: npm
 summary: Isomorphic client Runtime for Typescript/node.js/browser javascript client
   libraries generated using AutoRest

.licenses/npm/node-fetch.dep.yml (generated, 2 changed lines)

@@ -1,6 +1,6 @@
 ---
 name: node-fetch
-version: 2.6.5
+version: 2.6.6
 type: npm
 summary: A light-weight module that brings window.fetch to node.js
 homepage: https://github.com/bitinn/node-fetch

dist/cache-save/index.js (vendored, 53 changed lines)

@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
@@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
+        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
@@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     const contentLengthHeader = downloadResponse.message.headers['content-length'];
     if (contentLengthHeader) {
         const expectedLength = parseInt(contentLengthHeader);
-        const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+        const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
         if (actualLength !== expectedLength) {
             throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
         }
@@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });

 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;

 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
@@ -41451,7 +41451,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         if (core.isDebug()) {
             yield tar_1.listTar(archivePath, compressionMethod);
         }
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
         yield tar_1.extractTar(archivePath, compressionMethod);
         core.info('Cache restored successfully');
@@ -41496,18 +41496,29 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
+        try {
             yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
             if (core.isDebug()) {
                 yield tar_1.listTar(archivePath, compressionMethod);
             }
-            const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-            const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
             core.debug(`File Size: ${archiveFileSize}`);
             if (archiveFileSize > fileSizeLimit) {
-                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
             }
             core.debug(`Saving Cache (ID: ${cacheId})`);
             yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        }
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
         return cacheId;
     });
 }
@@ -53218,7 +53229,12 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
     clone() {
-        return new HttpHeaders(this.rawHeaders());
+        const resultPreservingCasing = {};
+        for (const headerKey in this._headersMap) {
+            const header = this._headersMap[headerKey];
+            resultPreservingCasing[header.name] = header.value;
+        }
+        return new HttpHeaders(resultPreservingCasing);
     }
 }

@@ -53255,7 +53271,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.1",
+    coreHttpVersion: "2.2.2",
     /**
      * Specifies HTTP.
      */
@@ -55568,7 +55584,7 @@ class FetchHttpClient {
         }
         let downloadStreamDone = Promise.resolve();
         if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-            downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+            downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
         }
         Promise.all([uploadStreamDone, downloadStreamDone])
             .then(() => {
@@ -55586,11 +55602,14 @@ class FetchHttpClient {
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
     return new Promise((resolve) => {
-        stream.on("close", resolve);
-        stream.on("end", resolve);
-        stream.on("error", resolve);
+        stream.once("close", () => {
+            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
+            resolve();
+        });
+        stream.once("end", resolve);
+        stream.once("error", resolve);
     });
 }
 function parseHeaders(headers) {

dist/setup/index.js (vendored, 53 changed lines)

The same changes as in dist/cache-save/index.js above, applied to this second vendored bundle at its own offsets: the getArchiveFileSizeIsBytes -> getArchiveFileSizeInBytes rename (@@ -1041,10 +1041,10 @@), the uploadFile and saveCache size lookups (@@ -4552,7 +4552,7 @@ and @@ -4602,7 +4602,7 @@), the download length check (@@ -7551,7 +7551,7 @@), the node-fetch URL fallback (@@ -36873,7 +36873,7 @@), the restoreCache size lookup (@@ -47198,7 +47198,7 @@), the try/finally archive cleanup and 10GB limit in saveCache (@@ -47243,18 +47243,29 @@), the casing-preserving HttpHeaders.clone (@@ -59947,7 +59958,12 @@), coreHttpVersion "2.2.2" (@@ -59984,7 +60000,7 @@), and the isStreamComplete abort handling (@@ -62297,7 +62313,7 @@ and @@ -62315,11 +62331,14 @@).

docs/adrs/0000-caching-dependencies.md (new file, 93 lines)

## 0. Caching dependencies

Date: 2021-10-01

Status: Accepted

## Context

`actions/setup-python` is one of the most popular Python actions on GitHub Actions. Many customers use it in conjunction with `actions/cache` to speed up dependency installation.
See more examples of proper usage in the [actions/cache documentation](https://github.com/actions/cache/blob/main/examples.md#python---pip).

## Goals & Anti-Goals

Integrating caching functionality into the `actions/setup-python` action will bring the following benefits for action users:
- Decrease the entry threshold for caching Python dependencies and simplify the initial configuration
- Simplify YAML pipelines by removing the extra steps needed to enable caching
- Bring caching to more Python users, and with it faster builds

We will add support for Pip and Pipenv dependency caching.

We won't pursue the goal of providing wide customization of caching in the scope of the `actions/setup-python` action. The purpose of this integration is to cover ~90% of basic use-cases. If users need flexible customization, we will advise them to use `actions/cache` directly, as sketched below.
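
For reference, a minimal sketch of the direct `actions/cache` setup this integration is meant to replace, assuming pip on an Ubuntu runner (`~/.cache/pip` is pip's cache location on Ubuntu; the path differs on macOS and Windows):

```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
# manual equivalent of `cache: pip`: cache pip's package cache,
# keyed on the hash of the dependency file
- uses: actions/cache@v2
  with:
    path: ~/.cache/pip
    key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
    restore-keys: |
      ${{ runner.os }}-pip-
```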

## Decision

- Add a `cache` input parameter to `actions/setup-python`. For now the input will accept the following values:
  - `pip` - enable caching for pip dependencies
  - `pipenv` - enable caching for pipenv dependencies
  - `''` - disable caching (default value)
- The cache feature will be disabled by default to make sure that we don't break existing customers.
- The action will search for dependency files (`requirements.txt` for pip and `Pipfile.lock` for pipenv) in the repository root (or relative to the repository root, if patterns are used) and throw an error if none is found.
- The hash of the found file will be used as part of the cache key (the same approach that `actions/cache` recommends):
  - The following cache key will be used for pip: `setup-python-${{ runner.os }}-pip-${{ hashFiles('<package-file-path>') }}`
  - The following cache key will be used for pipenv: `setup-python-${{ runner.os }}-python-${{ python-version }}-pipenv-${{ hashFiles('<package-file-path>') }}`. We add the Python version to the cache key because the created virtualenv folder with the project name contains a copy of the Python binary as a symlink to a path like `/opt/hostedtoolcache/Python/3.7.11`, so a cache could otherwise be restored with the wrong Python version. See details in the related [pull request](https://github.com/actions/cache/pull/607) in actions/cache.
- The action will save the package manager's global cache:
  - Pip (retrieved via `pip cache dir`). The command is available with pip 20.1 or later; we always update pip during installation, so the command should be available.
  - Pipenv (default cache paths):
    - `~/.local/share/virtualenvs` (macOS)
    - `~/.virtualenvs` (Windows)
    - `~/.local/share/virtualenvs` (Ubuntu)
- Add a `cache-dependency-path` input parameter to `actions/setup-python`. The input will accept one or more paths (relative to the repository root) to dependency files, including wildcard patterns. If a provided path contains wildcards, the action will search all matching files and calculate a common hash, like the `${{ hashFiles('**/requirements-dev.txt') }}` YAML construction does (see the sketch after this list).
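
The file hashing described above can be reproduced with `hashFiles` from `@actions/glob`, which is already among this action's dependencies (see package.json below). A rough sketch under that assumption; the helper name is illustrative, not the action's actual implementation:

```
const glob = require('@actions/glob');

// Hypothetical helper: build the documented pip cache key from a
// dependency-file pattern. glob.hashFiles walks $GITHUB_WORKSPACE,
// hashes every matching file, and returns a combined SHA-256, matching
// the `${{ hashFiles(...) }}` expression in workflow YAML.
async function computePipCacheKey(pattern) {
    const hash = await glob.hashFiles(pattern);
    return `setup-python-${process.env.RUNNER_OS}-pip-${hash}`;
}
```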

## Examples of real use-cases

- Pip package manager

```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pip
```

- Pipenv package manager

```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pipenv
```

- With `cache-dependency-path`

```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pip
    cache-dependency-path: |
      **/requirements-dev.txt
      **/requirements-test.txt
      **/requirements.txt
```

  or with a single pattern (quoted, since a bare `*` is not valid YAML):

```
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
  with:
    python-version: 3.9
    cache: pip
    cache-dependency-path: '**/requirements-dev.txt'
```

## Release process

As soon as the functionality is implemented, we will release a minor update of the action. There is no need to bump the major version since there are no breaking changes for existing users. After that, we will update [starter-workflows](https://github.com/actions/starter-workflows/blob/main/ci/python-app.yml) and the [GitHub Actions documentation](https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#caching-dependencies).

package-lock.json (generated, 18 changed lines)

@@ -5,9 +5,9 @@
     "requires": true,
     "dependencies": {
         "@actions/cache": {
-            "version": "1.0.7",
-            "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.7.tgz",
-            "integrity": "sha512-MY69kxuubqUFq84pFlu8m6Poxl5sR/xyhpC4JEvno7Yg9ASYdGizEmKgt0m8ovewpYKf15UAOcSC0hzS+DuosA==",
+            "version": "1.0.8",
+            "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.8.tgz",
+            "integrity": "sha512-GWNNB67w93HGJRQXlsV56YqrdAuDoP3esK/mo5mzU8WoDCVjtQgJGsTdkYUX7brswtT7xnI30bWNo1WLKQ8FZQ==",
             "requires": {
                 "@actions/core": "^1.2.6",
                 "@actions/exec": "^1.0.1",
@@ -136,9 +136,9 @@
         }
     },
     "@azure/core-http": {
-        "version": "2.2.1",
-        "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.1.tgz",
-        "integrity": "sha512-7ATnV3OGzCO2K9kMrh3NKUM8b4v+xasmlUhkNZz6uMbm+8XH/AexLkhRGsoo0GyKNlEGvyGEfytqTk0nUY2I4A==",
+        "version": "2.2.2",
+        "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.2.tgz",
+        "integrity": "sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==",
         "requires": {
             "@azure/abort-controller": "^1.0.0",
             "@azure/core-asynciterator-polyfill": "^1.0.0",
@@ -8137,9 +8137,9 @@
         "dev": true
     },
     "node-fetch": {
-        "version": "2.6.5",
-        "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.5.tgz",
-        "integrity": "sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ==",
+        "version": "2.6.6",
+        "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.6.tgz",
+        "integrity": "sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA==",
         "requires": {
             "whatwg-url": "^5.0.0"
         },

package.json (2 changed lines; the file header was lost in extraction, but the hunk below is clearly from package.json)

@@ -23,7 +23,7 @@
     "author": "GitHub",
     "license": "MIT",
     "dependencies": {
-        "@actions/cache": "^1.0.7",
+        "@actions/cache": "^1.0.8",
         "@actions/core": "^1.2.3",
         "@actions/exec": "^1.1.0",
         "@actions/glob": "^0.2.0",