Update @actions/cache version to 1.0.8 #283

Merged
merged 4 commits into from Nov 29, 2021
2 changes: 1 addition & 1 deletion .licenses/npm/@actions/cache.dep.yml

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion .licenses/npm/@azure/core-http.dep.yml

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion .licenses/npm/node-fetch.dep.yml

Some generated files are not rendered by default.

71 changes: 45 additions & 26 deletions dist/cache-save/index.js
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
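
The most visible change in the 1.0.8 bump is fixing the `getArchiveFileSizeIsBytes` typo to `getArchiveFileSizeInBytes`. Because the helper is renamed outright, every call site in the bundle has to move in the same commit. As an aside, a published library could instead keep the old name as a deprecated alias during a transition; a minimal sketch of that pattern (hypothetical, not what @actions/cache does):

    const fs = require('fs');

    function getArchiveFileSizeInBytes(filePath) {
        return fs.statSync(filePath).size;
    }
    exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
    // Hypothetical deprecated alias so external callers keep working
    // while they migrate to the corrected name.
    exports.getArchiveFileSizeIsBytes = getArchiveFileSizeInBytes;
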
@@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
+        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
@@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
         const contentLengthHeader = downloadResponse.message.headers['content-length'];
         if (contentLengthHeader) {
             const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
             if (actualLength !== expectedLength) {
                 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
             }
@@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
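
This node-fetch change makes Request construction prefer the URL class built into Node's `url` module, falling back to the bundled `whatwg-url` polyfill only when the native class is absent (URL became a named export of `url` in Node 6.13). A minimal sketch of the same feature-detection pattern, assuming both modules resolve:

    const Url = require('url');
    const whatwgUrl = require('whatwg-url'); // node-fetch's spec-compliant polyfill

    // Prefer the native WHATWG URL implementation when the runtime has it.
    const URL = Url.URL || whatwgUrl.URL;

    const u = new URL('https://example.com/path?x=1');
    console.log(u.hostname); // "example.com"
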
@@ -41451,7 +41451,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         if (core.isDebug()) {
             yield tar_1.listTar(archivePath, compressionMethod);
         }
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
         yield tar_1.extractTar(archivePath, compressionMethod);
         core.info('Cache restored successfully');
@@ -41496,18 +41496,29 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-        if (core.isDebug()) {
-            yield tar_1.listTar(archivePath, compressionMethod);
-        }
-        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
-        core.debug(`File Size: ${archiveFileSize}`);
-        if (archiveFileSize > fileSizeLimit) {
-            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
-        }
-        core.debug(`Saving Cache (ID: ${cacheId})`);
-        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        try {
+            yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+            if (core.isDebug()) {
+                yield tar_1.listTar(archivePath, compressionMethod);
+            }
+            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+            core.debug(`File Size: ${archiveFileSize}`);
+            if (archiveFileSize > fileSizeLimit) {
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            }
+            core.debug(`Saving Cache (ID: ${cacheId})`);
+            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        }
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
         return cacheId;
     });
 }
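
Two behavioral changes land in this saveCache hunk: the per-repo size cap doubles from 5 GB to 10 GB, and the create/measure/upload sequence now runs inside try/finally so the temporary tarball is unlinked even when the upload throws. A condensed sketch of the cleanup pattern, with hypothetical createArchive and upload helpers standing in for the tar and HTTP calls:

    const fs = require('fs').promises;

    async function saveWithCleanup(archivePath, createArchive, upload) {
        try {
            await createArchive(archivePath);
            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10 GB, matching the new cap
            const size = (await fs.stat(archivePath)).size;
            if (size > fileSizeLimit) {
                throw new Error(`Archive of ${size} B is over the 10GB limit`);
            }
            await upload(archivePath);
        }
        finally {
            // Best-effort cleanup: a failed unlink must not mask the real error.
            try {
                await fs.unlink(archivePath);
            }
            catch (error) {
                console.debug(`Failed to delete archive: ${error}`);
            }
        }
    }
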
@@ -53218,7 +53229,12 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
     clone() {
-        return new HttpHeaders(this.rawHeaders());
+        const resultPreservingCasing = {};
+        for (const headerKey in this._headersMap) {
+            const header = this._headersMap[headerKey];
+            resultPreservingCasing[header.name] = header.value;
+        }
+        return new HttpHeaders(resultPreservingCasing);
     }
 }
 
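The core-http 2.2.2 fix here addresses header-name casing: `rawHeaders()` returns a map keyed by lower-cased names, so cloning through it discarded the caller's original capitalization. Iterating the internal `_headersMap`, whose entries remember the name as supplied, preserves it. A simplified model of the idea (an assumption-level sketch, not the real core-http source):

    // Simplified: entries are keyed by lower-cased name but keep the
    // original casing in `name`, which clone() now copies from.
    class Headers {
        constructor(raw = {}) {
            this._headersMap = {};
            for (const name of Object.keys(raw)) {
                this._headersMap[name.toLowerCase()] = { name, value: raw[name] };
            }
        }
        clone() {
            const resultPreservingCasing = {};
            for (const headerKey in this._headersMap) {
                const header = this._headersMap[headerKey];
                resultPreservingCasing[header.name] = header.value;
            }
            return new Headers(resultPreservingCasing);
        }
    }

    const h = new Headers({ 'Content-Type': 'application/json' });
    console.log(h.clone()._headersMap['content-type'].name); // "Content-Type"
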
@@ -53255,7 +53271,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.1",
+    coreHttpVersion: "2.2.2",
     /**
      * Specifies HTTP.
      */
@@ -55568,7 +55584,7 @@ class FetchHttpClient {
             }
             let downloadStreamDone = Promise.resolve();
             if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
             }
             Promise.all([uploadStreamDone, downloadStreamDone])
                 .then(() => {
@@ -55586,11 +55602,14 @@
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
     return new Promise((resolve) => {
-        stream.on("close", resolve);
-        stream.on("end", resolve);
-        stream.on("error", resolve);
+        stream.once("close", () => {
+            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
+            resolve();
+        });
+        stream.once("end", resolve);
+        stream.once("error", resolve);
     });
 }
 function parseHeaders(headers) {
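
Two things change in isStreamComplete: `on` becomes `once` so each settle path fires at most one time, and an optional aborter is threaded through so that when the consumer closes the response stream early, the in-flight fetch is cancelled rather than left downloading in the background. The same pattern in modern syntax (a sketch; `?.` is equivalent to the compiled null checks above):

    function isStreamComplete(stream, aborter) {
        return new Promise((resolve) => {
            stream.once('close', () => {
                aborter?.abort(); // cancel the underlying request on early close
                resolve();
            });
            stream.once('end', resolve);
            stream.once('error', resolve);
        });
    }
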
71 changes: 45 additions & 26 deletions dist/setup/index.js
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
@@ -4552,7 +4552,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
+        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
@@ -4602,7 +4602,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -7551,7 +7551,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
         const contentLengthHeader = downloadResponse.message.headers['content-length'];
         if (contentLengthHeader) {
             const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
             if (actualLength !== expectedLength) {
                 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
             }
@@ -36873,7 +36873,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
@@ -47198,7 +47198,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         if (core.isDebug()) {
             yield tar_1.listTar(archivePath, compressionMethod);
         }
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
         yield tar_1.extractTar(archivePath, compressionMethod);
         core.info('Cache restored successfully');
@@ -47243,18 +47243,29 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-        if (core.isDebug()) {
-            yield tar_1.listTar(archivePath, compressionMethod);
-        }
-        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
-        core.debug(`File Size: ${archiveFileSize}`);
-        if (archiveFileSize > fileSizeLimit) {
-            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
-        }
-        core.debug(`Saving Cache (ID: ${cacheId})`);
-        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        try {
+            yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+            if (core.isDebug()) {
+                yield tar_1.listTar(archivePath, compressionMethod);
+            }
+            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+            core.debug(`File Size: ${archiveFileSize}`);
+            if (archiveFileSize > fileSizeLimit) {
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            }
+            core.debug(`Saving Cache (ID: ${cacheId})`);
+            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        }
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
         return cacheId;
     });
 }
@@ -59947,7 +59958,12 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
     clone() {
-        return new HttpHeaders(this.rawHeaders());
+        const resultPreservingCasing = {};
+        for (const headerKey in this._headersMap) {
+            const header = this._headersMap[headerKey];
+            resultPreservingCasing[header.name] = header.value;
+        }
+        return new HttpHeaders(resultPreservingCasing);
     }
 }
 
@@ -59984,7 +60000,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.1",
+    coreHttpVersion: "2.2.2",
     /**
      * Specifies HTTP.
      */
@@ -62297,7 +62313,7 @@ class FetchHttpClient {
             }
             let downloadStreamDone = Promise.resolve();
             if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
             }
             Promise.all([uploadStreamDone, downloadStreamDone])
                 .then(() => {
@@ -62315,11 +62331,14 @@
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
     return new Promise((resolve) => {
-        stream.on("close", resolve);
-        stream.on("end", resolve);
-        stream.on("error", resolve);
+        stream.once("close", () => {
+            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
+            resolve();
+        });
+        stream.once("end", resolve);
+        stream.once("error", resolve);
     });
 }
 function parseHeaders(headers) {
18 changes: 9 additions & 9 deletions package-lock.json

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^1.0.7",
+    "@actions/cache": "^1.0.8",
     "@actions/core": "^1.2.3",
     "@actions/exec": "^1.1.0",
     "@actions/glob": "^0.2.0",