Compare commits

..

28 Commits

SHA1 Message Date
cdec5dec0d Format and update tests 2019-12-18 10:59:51 -05:00
2ce22df8c4 Test out 16 concurrency with 32mb chunks 2019-12-17 17:52:59 -05:00
8c77f01f0b Test out 16 concurrent requests 2019-12-17 17:35:30 -05:00
4fcbc07edb Test out higher concurrency limits 2019-12-17 17:26:05 -05:00
16019b42a9 Fix threads array 2019-12-17 17:19:16 -05:00
73a15dc5a9 Add more debug logging 2019-12-17 17:13:37 -05:00
574cd74b58 ? 2019-12-17 17:08:48 -05:00
14055801c2 Add missing await 2019-12-17 17:01:57 -05:00
289c5d2518 Concurrency take 2 2019-12-17 16:59:18 -05:00
ba6476e454 Bad implementation of parallel await 2019-12-17 16:17:55 -05:00
b425e87f79 Don't autoclose file 2019-12-17 15:46:56 -05:00
83f86c103f Make uploads serial 2019-12-17 15:43:50 -05:00
64668e22dd Move hashsum after tar creation 2019-12-17 15:11:47 -05:00
1c77f64ab3 Use correct hashing program 2019-12-17 15:05:04 -05:00
a70833fb48 Add debug hashing (won't work on windows) 2019-12-17 14:57:36 -05:00
b25804d19e Fix resource URLs 2019-12-17 14:52:40 -05:00
577b274c51 More debugging 2019-12-17 14:47:57 -05:00
0816faf84c Use fs streams directly 2019-12-17 14:40:24 -05:00
131e247bd2 Change to on end 2019-12-17 14:22:32 -05:00
2cbd952179 Add more debugging 2019-12-17 14:16:15 -05:00
994e3b75fc Add request header and debug statements 2019-12-17 14:10:58 -05:00
21dc9a47e6 Add release files 2019-12-17 13:56:42 -05:00
436418ea07 Separate out reserve call 2019-12-17 13:56:10 -05:00
7f6523f535 Linting tests 2019-12-13 15:39:29 -05:00
4d3086b6b8 Fix download cache entry tests 2019-12-13 15:37:15 -05:00
d788427754 Linting 2019-12-13 15:33:33 -05:00
a8adbe4b05 Fix cacheEntry type 2019-12-13 15:32:00 -05:00
bad827c28e Initial pass at chunked upload apis 2019-12-13 15:19:25 -05:00
13 changed files with 360 additions and 603 deletions

View File

@ -35,7 +35,7 @@ on: push
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
@ -49,14 +49,14 @@ jobs:
- name: Generate Prime Numbers
if: steps.cache-primes.outputs.cache-hit != 'true'
run: /generate-primes.sh -d prime-numbers
- name: Use Prime Numbers
run: /primes.sh -d prime-numbers
```
## Implementation Examples
Every programming language and framework has its own way of caching.
Every programming language and framework has it's own way of caching.
See [Examples](examples.md) for a list of `actions/cache` implementations for use with:
@ -93,7 +93,7 @@ steps:
with:
path: path/to/dependencies
key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }}
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: /install.sh

View File

@ -1,16 +1,18 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/restore";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("@actions/exec");
jest.mock("@actions/io");
jest.mock("../src/utils/actionUtils");
jest.mock("../src/cacheHttpClient");
beforeAll(() => {
jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
@ -33,6 +35,10 @@ beforeAll(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.getSupportedEvents();
});
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
});
});
beforeEach(() => {
@ -239,7 +245,8 @@ test("restore with cache found", async () => {
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
@ -253,9 +260,22 @@ test("restore with cache found", async () => {
archivePath
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(mkdirMock).toHaveBeenCalledWith(cachePath);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
@ -306,7 +326,8 @@ test("restore with a pull request event and cache found", async () => {
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
@ -321,9 +342,22 @@ test("restore with a pull request event and cache found", async () => {
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
expect(mkdirMock).toHaveBeenCalledWith(cachePath);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
@ -374,7 +408,8 @@ test("restore with cache found for restore key", async () => {
.spyOn(actionUtils, "getArchiveFileSize")
.mockReturnValue(fileSize);
const extractTarMock = jest.spyOn(tar, "extractTar");
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
await run();
@ -389,9 +424,22 @@ test("restore with cache found for restore key", async () => {
);
expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
expect(mkdirMock).toHaveBeenCalledWith(cachePath);
expect(extractTarMock).toHaveBeenCalledTimes(1);
expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
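
Both the restore and save suites in this comparison add the same stub for `io.which`, so the code under test resolves the tar tool to its bare name instead of probing the runner's PATH; that is what lets the later assertions compare against `"tar"` literally. Below is a minimal, self-contained sketch of that stub. `resolveTar` is a hypothetical stand-in for the production lookup, not a function from this diff.

```typescript
import * as io from "@actions/io";

jest.mock("@actions/io");

// Hypothetical stand-in for the production code path that locates tar.
async function resolveTar(): Promise<string> {
    return io.which("tar", true);
}

beforeAll(() => {
    // Echo back whatever tool name is requested, exactly as in the suites above.
    jest.spyOn(io, "which").mockImplementation(tool => {
        return Promise.resolve(tool);
    });
});

test("io.which is stubbed to echo the tool name", async () => {
    await expect(resolveTar()).resolves.toBe("tar");
});
```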

View File

@ -1,17 +1,19 @@
import * as core from "@actions/core";
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "../src/cacheHttpClient";
import { Events, Inputs } from "../src/constants";
import { ArtifactCacheEntry } from "../src/contracts";
import run from "../src/save";
import * as tar from "../src/tar";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
jest.mock("@actions/core");
jest.mock("../src/cacheHttpClient");
jest.mock("../src/tar");
jest.mock("@actions/exec");
jest.mock("@actions/io");
jest.mock("../src/utils/actionUtils");
jest.mock("../src/cacheHttpClient");
beforeAll(() => {
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
@ -47,6 +49,10 @@ beforeAll(() => {
jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
return Promise.resolve("/foo/bar");
});
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
});
});
beforeEach(() => {
@ -122,7 +128,7 @@ test("save with exact match returns early", async () => {
return primaryKey;
});
const createTarMock = jest.spyOn(tar, "createTar");
const execMock = jest.spyOn(exec, "exec");
await run();
@ -130,7 +136,7 @@ test("save with exact match returns early", async () => {
`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(execMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
@ -192,7 +198,7 @@ test("save with large cache outputs warning", async () => {
const cachePath = path.resolve(inputPath);
testUtils.setInput(Inputs.Path, inputPath);
const createTarMock = jest.spyOn(tar, "createTar");
const execMock = jest.spyOn(exec, "exec");
const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit
jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
@ -203,68 +209,30 @@ test("save with large cache outputs warning", async () => {
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(logWarningMock).toHaveBeenCalledTimes(1);
expect(logWarningMock).toHaveBeenCalledWith(
"Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache."
"Cache size of ~4 GB (4294967296 B) is over the 2GB limit, not saving cache."
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with reserve cache failure outputs warning", async () => {
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const cacheEntry: ArtifactCacheEntry = {
cacheKey: "Linux-node-",
scope: "refs/heads/master",
creationTime: "2019-11-13T19:18:02+00:00",
archiveLocation: "www.actionscache.test/download"
};
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return JSON.stringify(cacheEntry);
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(-1);
});
const createTarMock = jest.spyOn(tar, "createTar");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
await run();
expect(reserveCacheMock).toHaveBeenCalledTimes(1);
expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
expect(infoMock).toHaveBeenCalledWith(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
expect(createTarMock).toHaveBeenCalledTimes(0);
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with server error outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
@ -292,13 +260,11 @@ test("save with server error outputs warning", async () => {
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const execMock = jest.spyOn(exec, "exec");
const saveCacheMock = jest
.spyOn(cacheHttpClient, "saveCache")
@ -313,8 +279,21 @@ test("save with server error outputs warning", async () => {
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
@ -351,13 +330,11 @@ test("save with valid inputs uploads a cache", async () => {
testUtils.setInput(Inputs.Path, inputPath);
const cacheId = 4;
const reserveCacheMock = jest
.spyOn(cacheHttpClient, "reserveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const reserveCacheMock = jest.spyOn(cacheHttpClient, "reserveCache").mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
const createTarMock = jest.spyOn(tar, "createTar");
const execMock = jest.spyOn(exec, "exec");
const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
@ -368,8 +345,21 @@ test("save with valid inputs uploads a cache", async () => {
const archivePath = path.join("/foo/bar", "cache.tgz");
expect(createTarMock).toHaveBeenCalledTimes(1);
expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"tar"`, args);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
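
The expected warning string above differs between the two sides of the diff only in how the byte count is rounded: dividing the 4294967296-byte fixture by 1024² gives "~4096 MB", while dividing by 1024³ gives "~4 GB". A quick sketch of the arithmetic behind both messages:

```typescript
const archiveFileSize = 4 * 1024 * 1024 * 1024; // 4294967296 B, the fixture used above

const asMB = Math.round(archiveFileSize / (1024 * 1024));        // 4096
const asGB = Math.round(archiveFileSize / (1024 * 1024 * 1024)); // 4

console.log(`Cache size of ~${asMB} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`);
console.log(`Cache size of ~${asGB} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`);
```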

View File

@ -1,58 +0,0 @@
import * as exec from "@actions/exec";
import * as io from "@actions/io";
import * as tar from "../src/tar";
jest.mock("@actions/exec");
jest.mock("@actions/io");
beforeAll(() => {
jest.spyOn(io, "which").mockImplementation(tool => {
return Promise.resolve(tool);
});
});
test("extract tar", async () => {
const mkdirMock = jest.spyOn(io, "mkdirP");
const execMock = jest.spyOn(exec, "exec");
const archivePath = "cache.tar";
const targetDirectory = "~/.npm/cache";
await tar.extractTar(archivePath, targetDirectory);
expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
const IS_WINDOWS = process.platform === "win32";
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-xz",
"-f",
archivePath,
"-C",
targetDirectory
]);
});
test("create tar", async () => {
const execMock = jest.spyOn(exec, "exec");
const archivePath = "cache.tar";
const sourceDirectory = "~/.npm/cache";
await tar.createTar(archivePath, sourceDirectory);
const IS_WINDOWS = process.platform === "win32";
const tarPath = IS_WINDOWS
? `${process.env["windir"]}\\System32\\tar.exe`
: "tar";
expect(execMock).toHaveBeenCalledTimes(1);
expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
"-cz",
"-f",
archivePath,
"-C",
sourceDirectory,
"."
]);
});
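
The deleted suite above covers `extractTar` and `createTar` but exercises the tar path lookup only implicitly, through the `io.which` stub. Purely as a sketch (not part of this diff), the Windows branch of `getTarPath` could be made unit-testable by injecting the platform and the filesystem probe; the injectable signature below is hypothetical, while the lookup logic mirrors the deleted helper.

```typescript
import { existsSync } from "fs";
import * as io from "@actions/io";

// Hypothetical, injectable variant of the deleted getTarPath helper.
async function getTarPath(
    platform: NodeJS.Platform = process.platform,
    exists: (p: string) => boolean = existsSync
): Promise<string> {
    if (platform === "win32") {
        // Prefer the BSD tar that ships with recent Windows builds.
        const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
        if (exists(systemTar)) {
            return systemTar;
        }
    }
    return io.which("tar", true);
}

test("prefers the system tar on Windows when present", async () => {
    process.env["windir"] = "C:\\Windows";
    await expect(getTarPath("win32", () => true)).resolves.toBe(
        "C:\\Windows\\System32\\tar.exe"
    );
});
```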

dist/restore/index.js vendored (212 lines changed)
View File

@ -1500,14 +1500,6 @@ const utils = __importStar(__webpack_require__(443));
function isSuccessStatusCode(statusCode) {
return statusCode >= 200 && statusCode < 300;
}
function isRetryableStatusCode(statusCode) {
const retryableStatusCodes = [
HttpClient_1.HttpCodes.BadGateway,
HttpClient_1.HttpCodes.ServiceUnavailable,
HttpClient_1.HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl() {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
@ -1598,74 +1590,21 @@ function getContentRange(start, end) {
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
// function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
// const stream = new Duplex();
// stream.push(buffer);
// stream.push(null);
// return stream;
// }
function uploadChunk(restClient, resourceUrl, data, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
};
const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
});
const response = yield uploadChunkRequest();
if (isSuccessStatusCode(response.statusCode)) {
return;
}
if (isRetryableStatusCode(response.statusCode)) {
core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
const retryResponse = yield uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.statusCode)) {
return;
}
}
throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`);
});
}
function parseEnvNumber(key) {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
function uploadFile(restClient, cacheId, archivePath) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const fd = fs.openSync(archivePath, "r");
const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY"), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE"), (_b !== null && _b !== void 0 ? _b : 32 * 1024 * 1024)); // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
yield uploadChunk(restClient, resourceUrl, chunk, start, end);
}
})));
}
finally {
fs.closeSync(fd);
}
return;
return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
});
}
function commitCache(restClient, cacheId, filesize) {
@ -1675,13 +1614,44 @@ function commitCache(restClient, cacheId, filesize) {
return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
});
}
function uploadFile(restClient, cacheId, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const responses = [];
const fd = fs.openSync(archivePath, "r");
const concurrency = 16; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
}
})));
fs.closeSync(fd);
const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
if (failedResponse) {
throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
}
return;
});
}
function saveCache(cacheId, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
core.debug("Upload cache");
yield uploadFile(restClient, cacheId, archivePath);
// Commit Cache
core.debug("Commiting cache");
// Commit Cache
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
@ -3097,13 +3067,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const path = __importStar(__webpack_require__(622));
const cacheHttpClient = __importStar(__webpack_require__(154));
const constants_1 = __webpack_require__(694);
const tar_1 = __webpack_require__(943);
const utils = __importStar(__webpack_require__(443));
function run() {
var _a;
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
// Validate inputs, this can cause task failure
@ -3141,7 +3112,7 @@ function run() {
}
try {
const cacheEntry = yield cacheHttpClient.getCacheEntry(keys);
if (!((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) {
if (!cacheEntry || !((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) {
core.info(`Cache not found for input keys: ${keys.join(", ")}.`);
return;
}
@ -3150,10 +3121,28 @@ function run() {
// Store the cache result
utils.setCacheState(cacheEntry);
// Download the cache from the cache entry
yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath);
yield cacheHttpClient.downloadCache((_b = cacheEntry) === null || _b === void 0 ? void 0 : _b.archiveLocation, archivePath);
yield exec_1.exec(`md5sum`, [archivePath]);
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
yield tar_1.extractTar(archivePath, cachePath);
// Create directory to extract tar into
yield io.mkdirP(cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
const tarPath = yield io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
yield exec_1.exec(`"${tarPath}"`, args);
const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry);
utils.setCacheHitOutput(isExactKeyMatch);
core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`);
@ -5249,79 +5238,6 @@ var personalaccesstoken_1 = __webpack_require__(327);
exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler;
/***/ }),
/***/ 943:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const fs_1 = __webpack_require__(747);
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (fs_1.existsSync(systemTar)) {
return systemTar;
}
}
return yield io.which("tar", true);
});
}
function execTar(args) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
yield exec_1.exec(`"${yield getTarPath()}"`, args);
}
catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
}
throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`);
}
});
}
function extractTar(archivePath, targetDirectory) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
yield io.mkdirP(targetDirectory);
const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
yield execTar(args);
});
}
exports.extractTar = extractTar;
function createTar(archivePath, sourceDirectory) {
return __awaiter(this, void 0, void 0, function* () {
const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
yield execTar(args);
});
}
exports.createTar = createTar;
/***/ }),
/***/ 986:
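
Earlier in this bundled file the comparison drops a retry helper: a chunk upload answered with 502, 503, or 504 was attempted exactly once more before the error surfaced. Below is a hedged reconstruction of that pattern in plain TypeScript; the actual REST call is reduced to an injected `doUpload` function so the sketch stays self-contained, and the error still reports the first response's status code, as in the compiled code above.

```typescript
function isSuccessStatusCode(statusCode: number): boolean {
    return statusCode >= 200 && statusCode < 300;
}

function isRetryableStatusCode(statusCode: number): boolean {
    // BadGateway, ServiceUnavailable, GatewayTimeout
    return [502, 503, 504].includes(statusCode);
}

async function uploadChunkWithRetry(
    doUpload: () => Promise<{ statusCode: number }>,
    start: number
): Promise<void> {
    const response = await doUpload();
    if (isSuccessStatusCode(response.statusCode)) {
        return;
    }
    if (isRetryableStatusCode(response.statusCode)) {
        // One retry for transient gateway errors, then give up.
        console.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
        const retryResponse = await doUpload();
        if (isSuccessStatusCode(retryResponse.statusCode)) {
            return;
        }
    }
    throw new Error(
        `Cache service responded with ${response.statusCode} during chunk upload.`
    );
}
```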

dist/save/index.js vendored (211 lines changed)
View File

@ -1500,14 +1500,6 @@ const utils = __importStar(__webpack_require__(443));
function isSuccessStatusCode(statusCode) {
return statusCode >= 200 && statusCode < 300;
}
function isRetryableStatusCode(statusCode) {
const retryableStatusCodes = [
HttpClient_1.HttpCodes.BadGateway,
HttpClient_1.HttpCodes.ServiceUnavailable,
HttpClient_1.HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl() {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl = (process.env["ACTIONS_CACHE_URL"] ||
@ -1598,74 +1590,21 @@ function getContentRange(start, end) {
// Content-Range: bytes 0-199/*
return `bytes ${start}-${end}/*`;
}
// function bufferToStream(buffer: Buffer): NodeJS.ReadableStream {
// const stream = new Duplex();
// stream.push(buffer);
// stream.push(null);
// return stream;
// }
function uploadChunk(restClient, resourceUrl, data, start, end) {
return __awaiter(this, void 0, void 0, function* () {
core.debug(`Uploading chunk of size ${end -
start +
1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
const requestOptions = getRequestOptions();
requestOptions.additionalHeaders = {
"Content-Type": "application/octet-stream",
"Content-Range": getContentRange(start, end)
};
const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
});
const response = yield uploadChunkRequest();
if (isSuccessStatusCode(response.statusCode)) {
return;
}
if (isRetryableStatusCode(response.statusCode)) {
core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`);
const retryResponse = yield uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.statusCode)) {
return;
}
}
throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`);
});
}
function parseEnvNumber(key) {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
function uploadFile(restClient, cacheId, archivePath) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const fd = fs.openSync(archivePath, "r");
const concurrency = (_a = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY"), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = (_b = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE"), (_b !== null && _b !== void 0 ? _b : 32 * 1024 * 1024)); // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
yield uploadChunk(restClient, resourceUrl, chunk, start, end);
}
})));
}
finally {
fs.closeSync(fd);
}
return;
return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions);
});
}
function commitCache(restClient, cacheId, filesize) {
@ -1675,13 +1614,44 @@ function commitCache(restClient, cacheId, filesize) {
return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions);
});
}
function uploadFile(restClient, cacheId, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const responses = [];
const fd = fs.openSync(archivePath, "r");
const concurrency = 16; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
while (offset < fileSize) {
const chunkSize = offset + MAX_CHUNK_SIZE > fileSize ? fileSize - offset : MAX_CHUNK_SIZE;
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, { fd, start, end, autoClose: false });
responses.push(yield uploadChunk(restClient, resourceUrl, chunk, start, end));
}
})));
fs.closeSync(fd);
const failedResponse = responses.find(x => !isSuccessStatusCode(x.statusCode));
if (failedResponse) {
throw new Error(`Cache service responded with ${failedResponse.statusCode} during chunk upload.`);
}
return;
});
}
function saveCache(cacheId, archivePath) {
return __awaiter(this, void 0, void 0, function* () {
const restClient = createRestClient();
core.debug("Upload cache");
yield uploadFile(restClient, cacheId, archivePath);
// Commit Cache
core.debug("Commiting cache");
// Commit Cache
const cacheSize = utils.getArchiveFileSize(archivePath);
const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize);
if (!isSuccessStatusCode(commitCacheResponse.statusCode)) {
@ -2985,10 +2955,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(__webpack_require__(470));
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const path = __importStar(__webpack_require__(622));
const cacheHttpClient = __importStar(__webpack_require__(154));
const constants_1 = __webpack_require__(694);
const tar_1 = __webpack_require__(943);
const utils = __importStar(__webpack_require__(443));
function run() {
return __awaiter(this, void 0, void 0, function* () {
@ -3012,7 +2983,7 @@ function run() {
}
core.debug("Reserving Cache");
const cacheId = yield cacheHttpClient.reserveCache(primaryKey);
if (cacheId == -1) {
if (cacheId < 0) {
core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`);
return;
}
@ -3021,15 +2992,32 @@ function run() {
core.debug(`Cache Path: ${cachePath}`);
const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz");
core.debug(`Archive Path: ${archivePath}`);
yield tar_1.createTar(archivePath, cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
const tarPath = yield io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
yield exec_1.exec(`"${tarPath}"`, args);
const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
if (archiveFileSize > fileSizeLimit) {
utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`);
utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024 * 1024))} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`);
return;
}
core.debug(`Saving Cache (ID: ${cacheId})`);
yield exec_1.exec(`md5sum`, [archivePath]);
core.debug("Saving Cache");
yield cacheHttpClient.saveCache(cacheId, archivePath);
}
catch (error) {
@ -5230,79 +5218,6 @@ var personalaccesstoken_1 = __webpack_require__(327);
exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler;
/***/ }),
/***/ 943:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const exec_1 = __webpack_require__(986);
const io = __importStar(__webpack_require__(1));
const fs_1 = __webpack_require__(747);
function getTarPath() {
return __awaiter(this, void 0, void 0, function* () {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (fs_1.existsSync(systemTar)) {
return systemTar;
}
}
return yield io.which("tar", true);
});
}
function execTar(args) {
var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
try {
yield exec_1.exec(`"${yield getTarPath()}"`, args);
}
catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`);
}
throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`);
}
});
}
function extractTar(archivePath, targetDirectory) {
return __awaiter(this, void 0, void 0, function* () {
// Create directory to extract tar into
yield io.mkdirP(targetDirectory);
const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
yield execTar(args);
});
}
exports.extractTar = extractTar;
function createTar(archivePath, sourceDirectory) {
return __awaiter(this, void 0, void 0, function* () {
const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
yield execTar(args);
});
}
exports.createTar = createTar;
/***/ }),
/***/ 986:
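
One side of the upload hunk in this file reads its tuning from the environment through the `parseEnvNumber` helper, falling back to 4 parallel requests and 32 * 1024 * 1024-byte chunks, while the other hard-codes 16 requests and 32000000-byte chunks. A minimal sketch of the env-driven variant, written as plain TypeScript rather than the compiled output shown above:

```typescript
// Parse a non-negative number from the environment, or signal "use the default".
function parseEnvNumber(key: string): number | undefined {
    const value = Number(process.env[key]);
    if (Number.isNaN(value) || value < 0) {
        return undefined;
    }
    return value;
}

const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4;               // parallel requests
const maxChunkSize = parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // bytes per chunk

console.log(`Concurrency: ${concurrency} and Chunk Size: ${maxChunkSize}`);
```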

View File

@ -1,6 +1,6 @@
# Examples
- [C# - NuGet](#c---nuget)
- [C# - Nuget](#c---nuget)
- [Elixir - Mix](#elixir---mix)
- [Go - Modules](#go---modules)
- [Java - Gradle](#java---gradle)
@ -14,7 +14,7 @@
- [Swift, Objective-C - Carthage](#swift-objective-c---carthage)
- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods)
## C# - NuGet
## C# - Nuget
Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies):
```yaml
@ -26,21 +26,6 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa
${{ runner.os }}-nuget-
```
Depending on the environment, huge packages might be pre-installed in the global cache folder.
If you do not want to include them, consider to move the cache folder like below.
>Note: This workflow does not work for projects that require files to be placed in user profile package folder
```yaml
env:
NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages
steps:
- uses: actions/cache@v1
with:
path: ${{ github.workspace }}/.nuget/packages
key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }}
restore-keys: |
${{ runner.os }}-nuget-
```
## Elixir - Mix
```yaml
- uses: actions/cache@v1

package-lock.json generated (14 lines changed)
View File

@ -1,6 +1,6 @@
{
"name": "cache",
"version": "1.1.0",
"version": "1.0.3",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -2854,9 +2854,9 @@
"dev": true
},
"handlebars": {
"version": "4.5.3",
"resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.3.tgz",
"integrity": "sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA==",
"version": "4.5.1",
"resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.1.tgz",
"integrity": "sha512-C29UoFzHe9yM61lOsIlCE5/mQVGrnIOrOq7maQl76L7tYPCgC1og0Ajt6uWnX4ZTxBPnjw+CUvawphwCfJgUnA==",
"dev": true,
"requires": {
"neo-async": "^2.6.0",
@ -5989,9 +5989,9 @@
"dev": true
},
"uglify-js": {
"version": "3.7.3",
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.7.3.tgz",
"integrity": "sha512-7tINm46/3puUA4hCkKYo4Xdts+JDaVC9ZPRcG8Xw9R4nhO/gZgUM3TENq8IF4Vatk8qCig4MzP/c8G4u2BkVQg==",
"version": "3.6.7",
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.7.tgz",
"integrity": "sha512-4sXQDzmdnoXiO+xvmTzQsfIiwrjUCSA95rSP4SEd8tDb51W2TiDOlL76Hl+Kw0Ie42PSItCW8/t6pBNCF2R48A==",
"dev": true,
"optional": true,
"requires": {

View File

@ -1,7 +1,7 @@
import * as core from "@actions/core";
import * as fs from "fs";
import { BearerCredentialHandler } from "typed-rest-client/Handlers";
import { HttpClient, HttpCodes } from "typed-rest-client/HttpClient";
import { HttpClient } from "typed-rest-client/HttpClient";
import { IHttpClientResponse } from "typed-rest-client/Interfaces";
import {
IRequestOptions,
@ -12,23 +12,13 @@ import {
ArtifactCacheEntry,
CommitCacheRequest,
ReserveCacheRequest,
ReserveCacheResponse
ReserverCacheResponse
} from "./contracts";
import * as utils from "./utils/actionUtils";
function isSuccessStatusCode(statusCode: number): boolean {
return statusCode >= 200 && statusCode < 300;
}
function isRetryableStatusCode(statusCode: number): boolean {
const retryableStatusCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
return retryableStatusCodes.includes(statusCode);
}
function getCacheApiUrl(): string {
// Ideally we just use ACTIONS_CACHE_URL
const baseUrl: string = (
@ -123,7 +113,7 @@ export async function reserveCache(key: string): Promise<number> {
const reserveCacheRequest: ReserveCacheRequest = {
key
};
const response = await restClient.create<ReserveCacheResponse>(
const response = await restClient.create<ReserverCacheResponse>(
"caches",
reserveCacheRequest,
getRequestOptions()
@ -147,7 +137,7 @@ async function uploadChunk(
data: NodeJS.ReadableStream,
start: number,
end: number
): Promise<void> {
): Promise<IRestResponse<void>> {
core.debug(
`Uploading chunk of size ${end -
start +
@ -162,43 +152,14 @@ async function uploadChunk(
"Content-Range": getContentRange(start, end)
};
const uploadChunkRequest = async (): Promise<IRestResponse<void>> => {
return await restClient.uploadStream<void>(
"PATCH",
resourceUrl,
data,
requestOptions
);
};
const response = await uploadChunkRequest();
if (isSuccessStatusCode(response.statusCode)) {
return;
}
if (isRetryableStatusCode(response.statusCode)) {
core.debug(
`Received ${response.statusCode}, retrying chunk at offset ${start}.`
);
const retryResponse = await uploadChunkRequest();
if (isSuccessStatusCode(retryResponse.statusCode)) {
return;
}
}
throw new Error(
`Cache service responded with ${response.statusCode} during chunk upload.`
return await restClient.uploadStream<void>(
"PATCH",
resourceUrl,
data,
requestOptions
);
}
function parseEnvNumber(key: string): number | undefined {
const value = Number(process.env[key]);
if (Number.isNaN(value) || value < 0) {
return undefined;
}
return value;
}
async function uploadFile(
restClient: RestClient,
cacheId: number,
@ -207,48 +168,56 @@ async function uploadFile(
// Upload Chunks
const fileSize = fs.statSync(archivePath).size;
const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString();
const responses: IRestResponse<void>[] = [];
const fd = fs.openSync(archivePath, "r");
const concurrency = parseEnvNumber("CACHE_UPLOAD_CONCURRENCY") ?? 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE =
parseEnvNumber("CACHE_UPLOAD_CHUNK_SIZE") ?? 32 * 1024 * 1024; // 32 MB Chunks
const concurrency = 4; // # of HTTP requests in parallel
const MAX_CHUNK_SIZE = 32000000; // 32 MB Chunks
core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`);
const parallelUploads = [...new Array(concurrency).keys()];
core.debug("Awaiting all uploads");
let offset = 0;
try {
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize = Math.min(
fileSize - offset,
MAX_CHUNK_SIZE
);
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
await Promise.all(
parallelUploads.map(async () => {
while (offset < fileSize) {
const chunkSize =
offset + MAX_CHUNK_SIZE > fileSize
? fileSize - offset
: MAX_CHUNK_SIZE;
const start = offset;
const end = offset + chunkSize - 1;
offset += MAX_CHUNK_SIZE;
const chunk = fs.createReadStream(archivePath, {
fd,
start,
end,
autoClose: false
});
responses.push(
await uploadChunk(
restClient,
resourceUrl,
chunk,
start,
end
);
}
})
)
);
}
})
);
fs.closeSync(fd);
const failedResponse = responses.find(
x => !isSuccessStatusCode(x.statusCode)
);
if (failedResponse) {
throw new Error(
`Cache service responded with ${failedResponse.statusCode} during chunk upload.`
);
} finally {
fs.closeSync(fd);
}
return;
}
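
The heart of the `uploadFile` change above is a fixed pool of workers that share a single byte offset into the archive: each worker claims the next chunk, streams that byte range from one shared file descriptor (`autoClose: false` keeps the descriptor open across chunks), and records the result. The two sides of the hunk differ only in details (Math.min versus an explicit ternary, try/finally versus closing the descriptor afterwards, whether responses are collected). The condensed sketch below picks one arrangement and abstracts the PATCH request behind an `uploadChunk` parameter so it stays self-contained.

```typescript
import * as fs from "fs";

// Condensed sketch of the parallel chunk-upload loop shown above. The actual
// upload (a PATCH with a Content-Range header) is injected as `uploadChunk`.
async function uploadFileInChunks(
    archivePath: string,
    concurrency: number,
    maxChunkSize: number,
    uploadChunk: (chunk: NodeJS.ReadableStream, start: number, end: number) => Promise<number>
): Promise<number[]> {
    const fileSize = fs.statSync(archivePath).size;
    const fd = fs.openSync(archivePath, "r");
    const statusCodes: number[] = [];
    let offset = 0;

    try {
        // Each worker repeatedly claims the next chunk by advancing the shared offset.
        await Promise.all(
            [...new Array(concurrency).keys()].map(async () => {
                while (offset < fileSize) {
                    const chunkSize = Math.min(fileSize - offset, maxChunkSize);
                    const start = offset;
                    const end = offset + chunkSize - 1;
                    offset += maxChunkSize;

                    // Reuse the single descriptor; autoClose would otherwise close it
                    // after the first chunk finishes streaming.
                    const chunk = fs.createReadStream(archivePath, {
                        fd,
                        start,
                        end,
                        autoClose: false
                    });
                    statusCodes.push(await uploadChunk(chunk, start, end));
                }
            })
        );
    } finally {
        fs.closeSync(fd);
    }
    return statusCodes;
}
```

The shared `offset` is safe here because each worker only yields control at an `await`: reading the offset and advancing it happen within a single turn of the event loop, so no two workers can claim the same chunk.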

src/contracts.d.ts vendored (2 lines changed)
View File

@ -14,6 +14,6 @@ export interface ReserveCacheRequest {
version?: string;
}
export interface ReserveCacheResponse {
export interface ReserverCacheResponse {
cacheId: number;
}

View File

@ -1,8 +1,9 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { extractTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
@ -60,7 +61,7 @@ async function run(): Promise<void> {
try {
const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
if (!cacheEntry?.archiveLocation) {
if (!cacheEntry || !cacheEntry?.archiveLocation) {
core.info(
`Cache not found for input keys: ${keys.join(", ")}.`
);
@ -78,7 +79,7 @@ async function run(): Promise<void> {
// Download the cache from the cache entry
await cacheHttpClient.downloadCache(
cacheEntry.archiveLocation,
cacheEntry?.archiveLocation,
archivePath
);
@ -89,7 +90,27 @@ async function run(): Promise<void> {
)} MB (${archiveFileSize} B)`
);
await extractTar(archivePath, cachePath);
// Create directory to extract tar into
await io.mkdirP(cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-xz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/")
]
: ["-xz", "-f", archivePath, "-C", cachePath];
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
const isExactKeyMatch = utils.isExactKeyMatch(
primaryKey,

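The restore diff above trades a call to a shared `extractTar` helper for the inlined sequence: create the target directory, build platform-specific tar arguments, resolve tar from the PATH, and shell out. Below is a compact sketch of that sequence under the same assumptions (a tar on the PATH; `--force-local` keeps the drive-letter colon in Windows paths from being read as a remote host by GNU tar). The `extractArchive` wrapper name exists only for the sketch.

```typescript
import { exec } from "@actions/exec";
import * as io from "@actions/io";

async function extractArchive(archivePath: string, cachePath: string): Promise<void> {
    // tar -C requires the target directory to exist.
    await io.mkdirP(cachePath);

    const IS_WINDOWS = process.platform === "win32";
    const args = IS_WINDOWS
        ? [
              "-xz",
              "--force-local",
              "-f",
              archivePath.replace(/\\/g, "/"),
              "-C",
              cachePath.replace(/\\/g, "/")
          ]
        : ["-xz", "-f", archivePath, "-C", cachePath];

    const tarPath = await io.which("tar", true);
    await exec(`"${tarPath}"`, args);
}
```
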
View File

@ -1,8 +1,9 @@
import * as core from "@actions/core";
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import * as path from "path";
import * as cacheHttpClient from "./cacheHttpClient";
import { Events, Inputs, State } from "./constants";
import { createTar } from "./tar";
import * as utils from "./utils/actionUtils";
async function run(): Promise<void> {
@ -36,7 +37,7 @@ async function run(): Promise<void> {
core.debug("Reserving Cache");
const cacheId = await cacheHttpClient.reserveCache(primaryKey);
if (cacheId == -1) {
if (cacheId < 0) {
core.info(
`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
);
@ -54,7 +55,24 @@ async function run(): Promise<void> {
);
core.debug(`Archive Path: ${archivePath}`);
await createTar(archivePath, cachePath);
// http://man7.org/linux/man-pages/man1/tar.1.html
// tar [-options] <name of the tar archive> [files or directories which to add into archive]
const IS_WINDOWS = process.platform === "win32";
const args = IS_WINDOWS
? [
"-cz",
"--force-local",
"-f",
archivePath.replace(/\\/g, "/"),
"-C",
cachePath.replace(/\\/g, "/"),
"."
]
: ["-cz", "-f", archivePath, "-C", cachePath, "."];
const tarPath = await io.which("tar", true);
core.debug(`Tar Path: ${tarPath}`);
await exec(`"${tarPath}"`, args);
const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit
const archiveFileSize = utils.getArchiveFileSize(archivePath);
@ -62,13 +80,13 @@ async function run(): Promise<void> {
if (archiveFileSize > fileSizeLimit) {
utils.logWarning(
`Cache size of ~${Math.round(
archiveFileSize / (1024 * 1024)
)} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
archiveFileSize / (1024 * 1024 * 1024)
)} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
);
return;
}
core.debug(`Saving Cache (ID: ${cacheId})`);
core.debug("Saving Cache");
await cacheHttpClient.saveCache(cacheId, archivePath);
} catch (error) {
utils.logWarning(error.message);
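
The save flow in this file reserves a cache entry before doing any work: a failed reservation (`== -1` on one side of the hunk, any negative id on the other) means another job owns the key, after which the archive is created, checked against the 2 GB limit, and uploaded. A condensed sketch of that control flow follows, with the HTTP and tar steps reduced to hypothetical injected functions and logging reduced to the console.

```typescript
interface SaveDeps {
    reserveCache(key: string): Promise<number>;
    createArchive(archivePath: string, cachePath: string): Promise<void>;
    getArchiveFileSize(archivePath: string): number;
    saveCache(cacheId: number, archivePath: string): Promise<void>;
}

const FILE_SIZE_LIMIT = 2 * 1024 * 1024 * 1024; // 2GB per-repo limit

async function save(
    primaryKey: string,
    cachePath: string,
    archivePath: string,
    deps: SaveDeps
): Promise<void> {
    const cacheId = await deps.reserveCache(primaryKey);
    if (cacheId < 0) {
        // Another job holds the reservation for this key; exit without failing.
        console.info(
            `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
        );
        return;
    }

    await deps.createArchive(archivePath, cachePath);

    const archiveFileSize = deps.getArchiveFileSize(archivePath);
    if (archiveFileSize > FILE_SIZE_LIMIT) {
        console.warn(
            `Cache size of ~${Math.round(
                archiveFileSize / (1024 * 1024 * 1024)
            )} GB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`
        );
        return;
    }

    await deps.saveCache(cacheId, archivePath);
}
```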

View File

@ -1,47 +0,0 @@
import { exec } from "@actions/exec";
import * as io from "@actions/io";
import { existsSync } from "fs";
async function getTarPath(): Promise<string> {
// Explicitly use BSD Tar on Windows
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
const systemTar = `${process.env["windir"]}\\System32\\tar.exe`;
if (existsSync(systemTar)) {
return systemTar;
}
}
return await io.which("tar", true);
}
async function execTar(args: string[]): Promise<void> {
try {
await exec(`"${await getTarPath()}"`, args);
} catch (error) {
const IS_WINDOWS = process.platform === "win32";
if (IS_WINDOWS) {
throw new Error(
`Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.`
);
}
throw new Error(`Tar failed with error: ${error?.message}`);
}
}
export async function extractTar(
archivePath: string,
targetDirectory: string
): Promise<void> {
// Create directory to extract tar into
await io.mkdirP(targetDirectory);
const args = ["-xz", "-f", archivePath, "-C", targetDirectory];
await execTar(args);
}
export async function createTar(
archivePath: string,
sourceDirectory: string
): Promise<void> {
const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."];
await execTar(args);
}