From 92cd9936b18046483df87bcb0ac0601ddfc89aed Mon Sep 17 00:00:00 2001
From: Patrick Arminio
Date: Mon, 21 Feb 2022 10:41:41 -0600
Subject: [PATCH] Build and format

---
 __tests__/cache-restore.test.ts |   3 +-
 __tests__/cache-save.test.ts    |   3 +-
 dist/cache-save/index.js        | 101 +++++++++----------------
 dist/setup/index.js             | 104 +++++++++-----------------------
 4 files changed, 61 insertions(+), 150 deletions(-)

diff --git a/__tests__/cache-restore.test.ts b/__tests__/cache-restore.test.ts
index d538b139f..596fefabe 100644
--- a/__tests__/cache-restore.test.ts
+++ b/__tests__/cache-restore.test.ts
@@ -10,7 +10,8 @@ describe('restore-cache', () => {
     'd8110e0006d7fb5ee76365d565eef9d37df1d11598b912d3eb66d398d57a1121';
   const requirementsLinuxHash =
     '2d0ff7f46b0e120e3d3294db65768b474934242637b9899b873e6283dfd16d7c';
-  const poetryLockHash = '571bf984f8d210e6a97f854e479fdd4a2b5af67b5fdac109ec337a0ea16e7836';
+  const poetryLockHash =
+    '571bf984f8d210e6a97f854e479fdd4a2b5af67b5fdac109ec337a0ea16e7836';
   const poetryConfigOutput = `
 cache-dir = "/Users/patrick/Library/Caches/pypoetry"
 experimental.new-installer = false
diff --git a/__tests__/cache-save.test.ts b/__tests__/cache-save.test.ts
index 42bb75b40..7d48fd34c 100644
--- a/__tests__/cache-save.test.ts
+++ b/__tests__/cache-save.test.ts
@@ -11,7 +11,8 @@ describe('run', () => {
     'd8110e0006d7fb5ee76365d565eef9d37df1d11598b912d3eb66d398d57a1121';
   const requirementsLinuxHash =
     '2d0ff7f46b0e120e3d3294db65768b474934242637b9899b873e6283dfd16d7c';
-  const poetryLockHash = '571bf984f8d210e6a97f854e479fdd4a2b5af67b5fdac109ec337a0ea16e7836';
+  const poetryLockHash =
+    '571bf984f8d210e6a97f854e479fdd4a2b5af67b5fdac109ec337a0ea16e7836';
 
   // core spy
   let infoSpy: jest.SpyInstance;
diff --git a/dist/cache-save/index.js b/dist/cache-save/index.js
index eea7574cf..04ed408f2 100644
--- a/dist/cache-save/index.js
+++ b/dist/cache-save/index.js
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeInBytes(filePath) {
+function getArchiveFileSizeIsBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
+exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
@@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
+        const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
@@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
         const contentLengthHeader = downloadResponse.message.headers['content-length'];
         if (contentLengthHeader) {
             const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
+            const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
             if (actualLength !== expectedLength) {
                 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
             }
@@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = Url.URL || whatwgUrl.URL;
+const URL = whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
@@ -34585,17 +34585,9 @@ AbortError.prototype = Object.create(Error.prototype);
 AbortError.prototype.constructor = AbortError;
 AbortError.prototype.name = 'AbortError';
 
-const URL$1 = Url.URL || whatwgUrl.URL;
-
 // fix an issue where "PassThrough", "resolve" aren't a named export for node <10
 const PassThrough$1 = Stream.PassThrough;
-
-const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
-  const orig = new URL$1(original).hostname;
-  const dest = new URL$1(destination).hostname;
-
-  return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
-};
+const resolve_url = Url.resolve;
 
 /**
  * Fetch function
@@ -34683,19 +34675,7 @@ function fetch(url, opts) {
       const location = headers.get('Location');
 
       // HTTP fetch step 5.3
-      let locationURL = null;
-      try {
-        locationURL = location === null ? null : new URL$1(location, request.url).toString();
-      } catch (err) {
-        // error here can only be invalid URL in Location: header
-        // do not throw when options.redirect == manual
-        // let the user extract the errorneous redirect URL
-        if (request.redirect !== 'manual') {
-          reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
-          finalize();
-          return;
-        }
-      }
+      const locationURL = location === null ? null : resolve_url(request.url, location);
 
       // HTTP fetch step 5.5
       switch (request.redirect) {
@@ -34743,12 +34723,6 @@ function fetch(url, opts) {
           size: request.size
         };
 
-        if (!isDomainOrSubdomain(request.url, locationURL)) {
-          for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
-            requestOpts.headers.delete(name);
-          }
-        }
-
         // HTTP-redirect fetch step 9
         if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
           reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
@@ -41477,7 +41451,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         if (core.isDebug()) {
             yield tar_1.listTar(archivePath, compressionMethod);
         }
-        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
         yield tar_1.extractTar(archivePath, compressionMethod);
         core.info('Cache restored successfully');
@@ -41522,29 +41496,18 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        try {
-            yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-            if (core.isDebug()) {
-                yield tar_1.listTar(archivePath, compressionMethod);
-            }
-            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
-            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
-            core.debug(`File Size: ${archiveFileSize}`);
-            if (archiveFileSize > fileSizeLimit) {
-                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
-            }
-            core.debug(`Saving Cache (ID: ${cacheId})`);
-            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
-        }
-        finally {
-            // Try to delete the archive to save space
-            try {
-                yield utils.unlinkFile(archivePath);
-            }
-            catch (error) {
-                core.debug(`Failed to delete archive: ${error}`);
-            }
-        }
+        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+        if (core.isDebug()) {
+            yield tar_1.listTar(archivePath, compressionMethod);
+        }
+        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
+        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        core.debug(`File Size: ${archiveFileSize}`);
+        if (archiveFileSize > fileSizeLimit) {
+            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+        }
+        core.debug(`Saving Cache (ID: ${cacheId})`);
+        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         return cacheId;
     });
 }
@@ -53255,12 +53218,7 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
    clone() {
-        const resultPreservingCasing = {};
-        for (const headerKey in this._headersMap) {
-            const header = this._headersMap[headerKey];
-            resultPreservingCasing[header.name] = header.value;
-        }
-        return new HttpHeaders(resultPreservingCasing);
+        return new HttpHeaders(this.rawHeaders());
     }
 }
 
@@ -53297,7 +53255,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.2",
+    coreHttpVersion: "2.2.1",
     /**
      * Specifies HTTP.
      */
@@ -55610,7 +55568,7 @@ class FetchHttpClient {
         }
         let downloadStreamDone = Promise.resolve();
         if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-            downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
+            downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
         }
         Promise.all([uploadStreamDone, downloadStreamDone])
             .then(() => {
@@ -55628,14 +55586,11 @@ class FetchHttpClient {
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream, aborter) {
+function isStreamComplete(stream) {
     return new Promise((resolve) => {
-        stream.once("close", () => {
-            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
-            resolve();
-        });
-        stream.once("end", resolve);
-        stream.once("error", resolve);
+        stream.on("close", resolve);
+        stream.on("end", resolve);
+        stream.on("error", resolve);
     });
 }
 function parseHeaders(headers) {
diff --git a/dist/setup/index.js b/dist/setup/index.js
index 603063926..c526ea795 100644
--- a/dist/setup/index.js
+++ b/dist/setup/index.js
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeInBytes(filePath) {
+function getArchiveFileSizeIsBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
+exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
@@ -4552,7 +4552,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
+        const fileSize = fs.statSync(archivePath).size;
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
@@ -4602,7 +4602,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -7551,7 +7551,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
         const contentLengthHeader = downloadResponse.message.headers['content-length'];
         if (contentLengthHeader) {
             const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
+            const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
             if (actualLength !== expectedLength) {
                 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
             }
@@ -36895,7 +36895,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = Url.URL || whatwgUrl.URL;
+const URL = whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
@@ -37158,17 +37158,9 @@ AbortError.prototype = Object.create(Error.prototype);
 AbortError.prototype.constructor = AbortError;
 AbortError.prototype.name = 'AbortError';
 
-const URL$1 = Url.URL || whatwgUrl.URL;
-
 // fix an issue where "PassThrough", "resolve" aren't a named export for node <10
 const PassThrough$1 = Stream.PassThrough;
-
-const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {
-  const orig = new URL$1(original).hostname;
-  const dest = new URL$1(destination).hostname;
-
-  return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);
-};
+const resolve_url = Url.resolve;
 
 /**
  * Fetch function
@@ -37256,19 +37248,7 @@ function fetch(url, opts) {
       const location = headers.get('Location');
 
       // HTTP fetch step 5.3
-      let locationURL = null;
-      try {
-        locationURL = location === null ? null : new URL$1(location, request.url).toString();
-      } catch (err) {
-        // error here can only be invalid URL in Location: header
-        // do not throw when options.redirect == manual
-        // let the user extract the errorneous redirect URL
-        if (request.redirect !== 'manual') {
-          reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));
-          finalize();
-          return;
-        }
-      }
+      const locationURL = location === null ? null : resolve_url(request.url, location);
 
       // HTTP fetch step 5.5
       switch (request.redirect) {
@@ -37316,12 +37296,6 @@ function fetch(url, opts) {
           size: request.size
         };
 
-        if (!isDomainOrSubdomain(request.url, locationURL)) {
-          for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {
-            requestOpts.headers.delete(name);
-          }
-        }
-
         // HTTP-redirect fetch step 9
         if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
          reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
@@ -38734,8 +38708,7 @@ class PoetryCache extends cache_distributor_1.default {
             '--list'
         ]);
         if (exitCode && stderr) {
-            console.log(stdout, stderr, exitCode);
-            throw new Error(`Could not get cache folder path for poetry package manager`);
+            throw new Error('Could not get cache folder path for poetry package manager');
         }
         const lines = stdout.trim().split(os.EOL);
         const config = {};
@@ -47333,7 +47306,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         if (core.isDebug()) {
             yield tar_1.listTar(archivePath, compressionMethod);
         }
-        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
         yield tar_1.extractTar(archivePath, compressionMethod);
         core.info('Cache restored successfully');
@@ -47378,29 +47351,18 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        try {
-            yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-            if (core.isDebug()) {
-                yield tar_1.listTar(archivePath, compressionMethod);
-            }
-            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
-            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
-            core.debug(`File Size: ${archiveFileSize}`);
-            if (archiveFileSize > fileSizeLimit) {
-                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
-            }
-            core.debug(`Saving Cache (ID: ${cacheId})`);
-            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
-        }
-        finally {
-            // Try to delete the archive to save space
-            try {
-                yield utils.unlinkFile(archivePath);
-            }
-            catch (error) {
-                core.debug(`Failed to delete archive: ${error}`);
-            }
-        }
+        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+        if (core.isDebug()) {
+            yield tar_1.listTar(archivePath, compressionMethod);
+        }
+        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
+        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        core.debug(`File Size: ${archiveFileSize}`);
+        if (archiveFileSize > fileSizeLimit) {
+            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+        }
+        core.debug(`Saving Cache (ID: ${cacheId})`);
+        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         return cacheId;
     });
 }
@@ -60093,12 +60055,7 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
     clone() {
-        const resultPreservingCasing = {};
-        for (const headerKey in this._headersMap) {
-            const header = this._headersMap[headerKey];
-            resultPreservingCasing[header.name] = header.value;
-        }
-        return new HttpHeaders(resultPreservingCasing);
+        return new HttpHeaders(this.rawHeaders());
     }
 }
 
@@ -60135,7 +60092,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.2",
+    coreHttpVersion: "2.2.1",
     /**
      * Specifies HTTP.
      */
@@ -62448,7 +62405,7 @@ class FetchHttpClient {
         }
         let downloadStreamDone = Promise.resolve();
         if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-            downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
+            downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
         }
         Promise.all([uploadStreamDone, downloadStreamDone])
             .then(() => {
@@ -62466,14 +62423,11 @@ class FetchHttpClient {
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream, aborter) {
+function isStreamComplete(stream) {
     return new Promise((resolve) => {
-        stream.once("close", () => {
-            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
-            resolve();
-        });
-        stream.once("end", resolve);
-        stream.once("error", resolve);
+        stream.on("close", resolve);
+        stream.on("end", resolve);
+        stream.on("error", resolve);
     });
 }
 function parseHeaders(headers) {