From 2e0d362ec5cf81e334dda822c49c96dcd4b7df2c Mon Sep 17 00:00:00 2001 From: Rob Herley Date: Thu, 19 May 2022 20:33:04 +0000 Subject: [PATCH 1/3] bump @actions/artifact to 1.1.0 --- .licenses/npm/@actions/artifact.dep.yml | 2 +- dist/index.js | 16040 +++++++++++----------- package-lock.json | 39 +- package.json | 2 +- 4 files changed, 8262 insertions(+), 7821 deletions(-) diff --git a/.licenses/npm/@actions/artifact.dep.yml b/.licenses/npm/@actions/artifact.dep.yml index 2e105a0e..c5c83bf6 100644 --- a/.licenses/npm/@actions/artifact.dep.yml +++ b/.licenses/npm/@actions/artifact.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/artifact" -version: 1.0.0 +version: 1.1.0 type: npm summary: homepage: diff --git a/dist/index.js b/dist/index.js index 0de67fda..fda863fb 100644 --- a/dist/index.js +++ b/dist/index.js @@ -19,7 +19,13 @@ module.exports = /******/ }; /******/ /******/ // Execute the module function -/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ var threw = true; +/******/ try { +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ threw = false; +/******/ } finally { +/******/ if(threw) delete installedModules[moduleId]; +/******/ } /******/ /******/ // Flag the module as loaded /******/ module.l = true; @@ -34,7 +40,7 @@ module.exports = /******/ // the startup function /******/ function startup() { /******/ // Load entry module and return exports -/******/ return __webpack_require__(385); +/******/ return __webpack_require__(799); /******/ }; /******/ /******/ // run startup @@ -43,4275 +49,4172 @@ module.exports = /************************************************************************/ /******/ ({ -/***/ 11: +/***/ 16: /***/ (function(module) { -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) +module.exports = require("tls"); - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') +/***/ }), - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) +/***/ 20: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - return wrapper +"use strict"; - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DownloadHttpClient = void 0; +const fs = __importStar(__webpack_require__(747)); +const core = __importStar(__webpack_require__(676)); +const zlib = __importStar(__webpack_require__(761)); +const utils_1 = __webpack_require__(128); +const url_1 = __webpack_require__(835); +const status_reporter_1 = __webpack_require__(722); +const perf_hooks_1 = __webpack_require__(630); +const http_manager_1 = __webpack_require__(851); +const config_variables_1 = __webpack_require__(750); +const requestUtils_1 = __webpack_require__(682); +class DownloadHttpClient { + constructor() { + this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download'); + // downloads are usually significantly faster than uploads so display status information every second + this.statusReporter = new status_reporter_1.StatusReporter(1000); } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) + /** + * Gets a list of all artifacts that are in a specific container + */ + listArtifacts() { + return __awaiter(this, void 0, void 0, function* () { + const artifactUrl = utils_1.getArtifactUrl(); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.downloadHttpManager.getClient(0); + const headers = utils_1.getDownloadHeaders('application/json'); + const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); })); + const body = yield response.readBody(); + return JSON.parse(body); + }); + } + /** + * Fetches a set of container items that describe the contents of an artifact + * @param artifactName the name of the artifact + * @param containerUrl the artifact container URL for the run + */ + getContainerItems(artifactName, containerUrl) { + return __awaiter(this, void 0, void 0, function* () { + // the itemPath search parameter controls which containers will be returned + const resourceUrl = new url_1.URL(containerUrl); + resourceUrl.searchParams.append('itemPath', artifactName); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + 
const client = this.downloadHttpManager.getClient(0); + const headers = utils_1.getDownloadHeaders('application/json'); + const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); })); + const body = yield response.readBody(); + return JSON.parse(body); + }); + } + /** + * Concurrently downloads all the files that are part of an artifact + * @param downloadItems information about what items to download and where to save them + */ + downloadSingleArtifact(downloadItems) { + return __awaiter(this, void 0, void 0, function* () { + const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency(); + // limit the number of files downloaded at a single time + core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; + let currentFile = 0; + let downloadedFiles = 0; + core.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); + this.statusReporter.start(); + yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () { + while (currentFile < downloadItems.length) { + const currentFileToDownload = downloadItems[currentFile]; + currentFile += 1; + const startTime = perf_hooks_1.performance.now(); + yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); + if (core.isDebug()) { + core.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + } + this.statusReporter.incrementProcessedCount(); + } + }))) + .catch(error => { + throw new Error(`Unable to download the artifact: ${error}`); + }) + .finally(() => { + this.statusReporter.stop(); + // safety dispose all connections + this.downloadHttpManager.disposeAndReplaceAllClients(); + }); + }); + } + /** + * Downloads an individual file + * @param httpClientIndex the index of the http client that is used to make all of the calls + * @param artifactLocation origin location where a file will be downloaded from + * @param downloadPath destination location for the file being downloaded + */ + downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) { + return __awaiter(this, void 0, void 0, function* () { + let retryCount = 0; + const retryLimit = config_variables_1.getRetryLimit(); + let destinationStream = fs.createWriteStream(downloadPath); + const headers = utils_1.getDownloadHeaders('application/json', true, true); + // a single GET request is used to download a file + const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () { + const client = this.downloadHttpManager.getClient(httpClientIndex); + return yield client.get(artifactLocation, headers); + }); + // check the response headers to determine if the file was compressed using gzip + const isGzip = (incomingHeaders) => { + return ('content-encoding' in incomingHeaders && + incomingHeaders['content-encoding'] === 'gzip'); + }; + // Increments the current retry count and then checks if the retry limit has been reached + // If there have been too many retries, fail so the download stops. 
If there is a retryAfterValue value provided, + // it will be used + const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { + retryCount++; + if (retryCount > retryLimit) { + return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`)); + } + else { + this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); + if (retryAfterValue) { + // Back off by waiting the specified time denoted by the retry-after header + core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + yield utils_1.sleep(retryAfterValue); + } + else { + // Back off using an exponential value that depends on the retry count + const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + yield utils_1.sleep(backoffTime); + } + core.info(`Finished backoff for retry #${retryCount}, continuing with download`); + } + }); + const isAllBytesReceived = (expected, received) => { + // be lenient, if any input is missing, assume success, i.e. not truncated + if (!expected || + !received || + process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) { + core.info('Skipping download validation.'); + return true; + } + return parseInt(expected) === received; + }; + const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () { + destinationStream.close(); + yield utils_1.rmFile(fileDownloadPath); + destinationStream = fs.createWriteStream(fileDownloadPath); + }); + // keep trying to download a file until a retry limit has been reached + while (retryCount <= retryLimit) { + let response; + try { + response = yield makeDownloadRequest(); + } + catch (error) { + // if an error is caught, it is usually indicative of a timeout so retry the download + core.info('An error occurred while attempting to download a file'); + // eslint-disable-next-line no-console + console.log(error); + // increment the retryCount and use exponential backoff to wait before making the next request + yield backOff(); + continue; + } + let forceRetry = false; + if (utils_1.isSuccessStatusCode(response.message.statusCode)) { + // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string + // which can cause some gzip encoded data to be lost + // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents + try { + const isGzipped = isGzip(response.message.headers); + yield this.pipeResponseToFile(response, destinationStream, isGzipped); + if (isGzipped || + isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) { + return; + } + else { + forceRetry = true; + } + } + catch (error) { + // retry on error, most likely streams were corrupted + forceRetry = true; + } + } + if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) { + core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + resetDestinationStream(downloadPath); + // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff + utils_1.isThrottledStatusCode(response.message.statusCode) + ? 
yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + : yield backOff(); + } + else { + // Some unexpected response code, fail immediately and stop the download + utils_1.displayHttpDiagnostics(response); + return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`)); + } + } + }); + } + /** + * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary + * @param response the http response received when downloading a file + * @param destinationStream the stream where the file should be written to + * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it + */ + pipeResponseToFile(response, destinationStream, isGzip) { + return __awaiter(this, void 0, void 0, function* () { + yield new Promise((resolve, reject) => { + if (isGzip) { + const gunzip = zlib.createGunzip(); + response.message + .on('error', error => { + core.error(`An error occurred while attempting to read the response stream`); + gunzip.close(); + destinationStream.close(); + reject(error); + }) + .pipe(gunzip) + .on('error', error => { + core.error(`An error occurred while attempting to decompress the response stream`); + destinationStream.close(); + reject(error); + }) + .pipe(destinationStream) + .on('close', () => { + resolve(); + }) + .on('error', error => { + core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + reject(error); + }); + } + else { + response.message + .on('error', error => { + core.error(`An error occurred while attempting to read the response stream`); + destinationStream.close(); + reject(error); + }) + .pipe(destinationStream) + .on('close', () => { + resolve(); + }) + .on('error', error => { + core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + reject(error); + }); + } + }); + return; + }); } - return ret - } -} - - -/***/ }), - -/***/ 13: -/***/ (function(module, __unusedexports, __webpack_require__) { - -const assert = __webpack_require__(357) -const path = __webpack_require__(622) -const fs = __webpack_require__(747) -let glob = undefined -try { - glob = __webpack_require__(402) -} catch (_err) { - // treat glob as optional. 
-} - -const defaultGlobOpts = { - nosort: true, - silent: true -} - -// for EMFILE handling -let timeout = 0 - -const isWindows = (process.platform === "win32") - -const defaults = options => { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 - options.emfileWait = options.emfileWait || 1000 - if (options.glob === false) { - options.disableGlob = true - } - if (options.disableGlob !== true && glob === undefined) { - throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') - } - options.disableGlob = options.disableGlob || false - options.glob = options.glob || defaultGlobOpts -} - -const rimraf = (p, options, cb) => { - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert.equal(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - let busyTries = 0 - let errState = null - let n = 0 - - const next = (er) => { - errState = errState || er - if (--n === 0) - cb(errState) - } - - const afterGlob = (er, results) => { - if (er) - return cb(er) - - n = results.length - if (n === 0) - return cb() - - results.forEach(p => { - const CB = (er) => { - if (er) { - if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && - busyTries < options.maxBusyTries) { - busyTries ++ - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) - } - - // this one won't happen if graceful-fs is used. - if (er.code === "EMFILE" && timeout < options.emfileWait) { - return setTimeout(() => rimraf_(p, options, CB), timeout ++) - } - - // already gone - if (er.code === "ENOENT") er = null - } - - timeout = 0 - next(er) - } - rimraf_(p, options, CB) - }) - } - - if (options.disableGlob || !glob.hasMagic(p)) - return afterGlob(null, [p]) - - options.lstat(p, (er, stat) => { - if (!er) - return afterGlob(null, [p]) - - glob(p, options.glob, afterGlob) - }) - -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -const rimraf_ = (p, options, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === "ENOENT") - return cb(null) - - // Windows can EPERM on stat. Life is suffering. 
- if (er && er.code === "EPERM" && isWindows) - fixWinEPERM(p, options, er, cb) - - if (st && st.isDirectory()) - return rmdir(p, options, er, cb) - - options.unlink(p, er => { - if (er) { - if (er.code === "ENOENT") - return cb(null) - if (er.code === "EPERM") - return (isWindows) - ? fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - if (er.code === "EISDIR") - return rmdir(p, options, er, cb) - } - return cb(er) - }) - }) -} - -const fixWinEPERM = (p, options, er, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) - cb(er2.code === "ENOENT" ? null : er) - else - options.stat(p, (er3, stats) => { - if (er3) - cb(er3.code === "ENOENT" ? null : er) - else if (stats.isDirectory()) - rmdir(p, options, er, cb) - else - options.unlink(p, cb) - }) - }) } +exports.DownloadHttpClient = DownloadHttpClient; +//# sourceMappingURL=download-http-client.js.map -const fixWinEPERMSync = (p, options, er) => { - assert(p) - assert(options) +/***/ }), - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === "ENOENT") - return - else - throw er - } +/***/ 26: +/***/ (function(module, __unusedexports, __webpack_require__) { - let stats - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === "ENOENT") - return - else - throw er - } +/*! + * Tmp + * + * Copyright (c) 2011-2017 KARASZI Istvan + * + * MIT Licensed + */ - if (stats.isDirectory()) - rmdirSync(p, options, er) - else - options.unlinkSync(p) -} +/* + * Module dependencies. + */ +const fs = __webpack_require__(747); +const os = __webpack_require__(87); +const path = __webpack_require__(622); +const crypto = __webpack_require__(417); +const _c = { fs: fs.constants, os: os.constants }; +const rimraf = __webpack_require__(254); -const rmdir = (p, options, originalEr, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') +/* + * The working inner variables. + */ +const + // the random characters to choose from + RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, er => { - if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) - rmkids(p, options, cb) - else if (er && er.code === "ENOTDIR") - cb(originalEr) - else - cb(er) - }) -} + TEMPLATE_PATTERN = /XXXXXX/, -const rmkids = (p, options, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') + DEFAULT_TRIES = 3, - options.readdir(p, (er, files) => { - if (er) - return cb(er) - let n = files.length - if (n === 0) - return options.rmdir(p, cb) - let errState - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) - return - if (er) - return cb(errState = er) - if (--n === 0) - options.rmdir(p, cb) - }) - }) - }) -} + CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. 
-const rimrafSync = (p, options) => { - options = options || {} - defaults(options) + // constants are off on the windows platform and will not match the actual errno codes + IS_WIN32 = os.platform() === 'win32', + EBADF = _c.EBADF || _c.os.errno.EBADF, + ENOENT = _c.ENOENT || _c.os.errno.ENOENT, - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.equal(typeof options, 'object', 'rimraf: options should be object') + DIR_MODE = 0o700 /* 448 */, + FILE_MODE = 0o600 /* 384 */, - let results + EXIT = 'exit', - if (options.disableGlob || !glob.hasMagic(p)) { - results = [p] - } else { - try { - options.lstatSync(p) - results = [p] - } catch (er) { - results = glob.sync(p, options.glob) - } - } + // this will hold the objects need to be removed on exit + _removeObjects = [], - if (!results.length) - return + // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback + FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), + FN_RIMRAF_SYNC = rimraf.sync; - for (let i = 0; i < results.length; i++) { - const p = results[i] +let + _gracefulCleanup = false; - let st - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === "ENOENT") - return +/** + * Gets a temporary file name. + * + * @param {(Options|tmpNameCallback)} options options or callback + * @param {?tmpNameCallback} callback the callback function + */ +function tmpName(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; - // Windows can EPERM on stat. Life is suffering. - if (er.code === "EPERM" && isWindows) - fixWinEPERMSync(p, options, er) - } + try { + _assertAndSanitizeOptions(opts); + } catch (err) { + return cb(err); + } + let tries = opts.tries; + (function _getUniqueName() { try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) - rmdirSync(p, options, null) - else - options.unlinkSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "EPERM") - return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - if (er.code !== "EISDIR") - throw er + const name = _generateTmpName(opts); - rmdirSync(p, options, er) - } - } -} + // check whether the path exists then retry if needed + fs.stat(name, function (err) { + /* istanbul ignore else */ + if (!err) { + /* istanbul ignore else */ + if (tries-- > 0) return _getUniqueName(); -const rmdirSync = (p, options, originalEr) => { - assert(p) - assert(options) + return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); + } - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "ENOTDIR") - throw originalEr - if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") - rmkidsSync(p, options) - } + cb(null, name); + }); + } catch (err) { + cb(err); + } + }()); } -const rmkidsSync = (p, options) => { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) +/** + * Synchronous version of tmpName. 
+ * + * @param {Object} options + * @returns {string} the generated random name + * @throws {Error} if the options are invalid or could not generate a filename + */ +function tmpNameSync(options) { + const + args = _parseArguments(options), + opts = args[0]; - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const retries = isWindows ? 100 : 1 - let i = 0 + _assertAndSanitizeOptions(opts); + + let tries = opts.tries; do { - let threw = true + const name = _generateTmpName(opts); try { - const ret = options.rmdirSync(p, options) - threw = false - return ret - } finally { - if (++i < retries && threw) - continue + fs.statSync(name); + } catch (e) { + return name; } - } while (true) -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - -/***/ }), + } while (tries-- > 0); -/***/ 16: -/***/ (function(module) { + throw new Error('Could not get a unique tmp filename, max tries reached'); +} -module.exports = require("tls"); +/** + * Creates and opens a temporary file. + * + * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined + * @param {?fileCallback} callback + */ +function file(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; -/***/ }), + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); -/***/ 49: -/***/ (function(module, __unusedexports, __webpack_require__) { + // create and open the file + fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { + /* istanbu ignore else */ + if (err) return cb(err); -var wrappy = __webpack_require__(11) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) + if (opts.discardDescriptor) { + return fs.close(fd, function _discardCallback(possibleErr) { + // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only + return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); + }); + } else { + // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care + // about the descriptor + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false)); + } + }); + }); +} - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) +/** + * Synchronous version of file. 
+ * + * @param {Options} options + * @returns {FileSyncObject} object consists of name, fd and removeCallback + * @throws {Error} if cannot create a file + */ +function fileSync(options) { + const + args = _parseArguments(options), + opts = args[0]; -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + const name = tmpNameSync(opts); + var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + /* istanbul ignore else */ + if (opts.discardDescriptor) { + fs.closeSync(fd); + fd = undefined; } - f.called = false - return f + + return { + name: name, + fd: fd, + removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) + }; } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f +/** + * Creates a temporary directory. + * + * @param {(Options|dirCallback)} options the options or the callback function + * @param {?dirCallback} callback + */ +function dir(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create the directory + fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { + /* istanbul ignore else */ + if (err) return cb(err); + + cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); + }); + }); } +/** + * Synchronous version of dir. + * + * @param {Options} options + * @returns {DirSyncObject} object consists of name and removeCallback + * @throws {Error} if it cannot create a directory + */ +function dirSync(options) { + const + args = _parseArguments(options), + opts = args[0]; -/***/ }), + const name = tmpNameSync(opts); + fs.mkdirSync(name, opts.mode || DIR_MODE); -/***/ 82: -/***/ (function(__unusedmodule, exports) { + return { + name: name, + removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) + }; +} -"use strict"; +/** + * Removes files asynchronously. + * + * @param {Object} fdPath + * @param {Function} next + * @private + */ +function _removeFileAsync(fdPath, next) { + const _handler = function (err) { + if (err && !_isENOENT(err)) { + // reraise any unanticipated error + return next(err); + } + next(); + }; + + if (0 <= fdPath[0]) + fs.close(fdPath[0], function () { + fs.unlink(fdPath[1], _handler); + }); + else fs.unlink(fdPath[1], _handler); +} -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", { value: true }); /** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string + * Removes files synchronously. 
+ * + * @param {Object} fdPath + * @private */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; +function _removeFileSync(fdPath) { + let rethrownException = null; + try { + if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); + } catch (e) { + // reraise any unanticipated error + if (!_isEBADF(e) && !_isENOENT(e)) throw e; + } finally { + try { + fs.unlinkSync(fdPath[1]); } - else if (typeof input === 'string' || input instanceof String) { - return input; + catch (e) { + // reraise any unanticipated error + if (!_isENOENT(e)) rethrownException = e; } - return JSON.stringify(input); + } + if (rethrownException !== null) { + throw rethrownException; + } } -exports.toCommandValue = toCommandValue; -//# sourceMappingURL=utils.js.map -/***/ }), +/** + * Prepares the callback for removal of the temporary file. + * + * Returns either a sync callback or a async callback depending on whether + * fileSync or file was called, which is expressed by the sync parameter. + * + * @param {string} name the path of the file + * @param {number} fd file descriptor + * @param {Object} opts + * @param {boolean} sync + * @returns {fileCallback | fileCallbackSync} + * @private + */ +function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { + const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); + const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); -/***/ 87: -/***/ (function(module) { + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); -module.exports = require("os"); + return sync ? removeCallbackSync : removeCallback; +} -/***/ }), +/** + * Prepares the callback for removal of the temporary directory. + * + * Returns either a sync callback or a async callback depending on whether + * tmpFileSync or tmpFile was called, which is expressed by the sync parameter. + * + * @param {string} name + * @param {Object} opts + * @param {boolean} sync + * @returns {Function} the callback + * @private + */ +function _prepareTmpDirRemoveCallback(name, opts, sync) { + const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs); + const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; + const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); + const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); -/***/ 93: -/***/ (function(module, __unusedexports, __webpack_require__) { + return sync ? removeCallbackSync : removeCallback; +} -module.exports = minimatch -minimatch.Minimatch = Minimatch +/** + * Creates a guarded function wrapping the removeFunction call. + * + * The cleanup callback is save to be called multiple times. + * Subsequent invocations will be ignored. 
+ * + * @param {Function} removeFunction + * @param {string} fileOrDirName + * @param {boolean} sync + * @param {cleanupCallbackSync?} cleanupCallbackSync + * @returns {cleanupCallback | cleanupCallbackSync} + * @private + */ +function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { + let called = false; -var path = { sep: '/' } -try { - path = __webpack_require__(622) -} catch (er) {} + // if sync is true, the next parameter will be ignored + return function _cleanupCallback(next) { -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __webpack_require__(306) + /* istanbul ignore else */ + if (!called) { + // remove cleanupCallback from cache + const toRemove = cleanupCallbackSync || _cleanupCallback; + const index = _removeObjects.indexOf(toRemove); + /* istanbul ignore else */ + if (index >= 0) _removeObjects.splice(index, 1); -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } -} - -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' - -// * => any number of characters -var star = qmark + '*?' - -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' - -// characters that need to be escaped in RegExp. -var reSpecials = charSet('().*{}+?[]^$\\!') - -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) + called = true; + if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { + return removeFunction(fileOrDirName); + } else { + return removeFunction(fileOrDirName, next || function() {}); + } + } + }; } -// normalizes slashes. -var slashSplit = /\/+/ +/** + * The garbage collector. + * + * @private + */ +function _garbageCollector() { + /* istanbul ignore else */ + if (!_gracefulCleanup) return; -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) + // the function being called removes itself from _removeObjects, + // loop until _removeObjects is empty + while (_removeObjects.length) { + try { + _removeObjects[0](); + } catch (e) { + // already removed? + } } } -function ext (a, b) { - a = a || {} - b = b || {} - var t = {} - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - return t -} - -minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return minimatch - - var orig = minimatch +/** + * Random name generator based on crypto. 
+ * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript + * + * @param {number} howMany + * @returns {string} the generated random name + * @private + */ +function _randomChars(howMany) { + let + value = [], + rnd = null; - var m = function minimatch (p, pattern, options) { - return orig.minimatch(p, pattern, ext(def, options)) + // make sure that we do not fail because we ran out of entropy + try { + rnd = crypto.randomBytes(howMany); + } catch (e) { + rnd = crypto.pseudoRandomBytes(howMany); } - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) + for (var i = 0; i < howMany; i++) { + value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); } - return m + return value.join(''); } -Minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return Minimatch - return minimatch.defaults(def).Minimatch +/** + * Helper which determines whether a string s is blank, that is undefined, or empty or null. + * + * @private + * @param {string} s + * @returns {Boolean} true whether the string s is blank, false otherwise + */ +function _isBlank(s) { + return s === null || _isUndefined(s) || !s.trim(); } -function minimatch (p, pattern, options) { - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } +/** + * Checks whether the `obj` parameter is defined or not. + * + * @param {Object} obj + * @returns {boolean} true if the object is undefined + * @private + */ +function _isUndefined(obj) { + return typeof obj === 'undefined'; +} - if (!options) options = {} +/** + * Parses the function arguments. + * + * This function helps to have optional arguments. + * + * @param {(Options|null|undefined|Function)} options + * @param {?Function} callback + * @returns {Array} parsed arguments + * @private + */ +function _parseArguments(options, callback) { + /* istanbul ignore else */ + if (typeof options === 'function') { + return [{}, options]; + } - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false + /* istanbul ignore else */ + if (_isUndefined(options)) { + return [{}, callback]; } - // "" only matches "" - if (pattern.trim() === '') return p === '' + // copy options so we do not leak the changes we make internally + const actualOptions = {}; + for (const key of Object.getOwnPropertyNames(options)) { + actualOptions[key] = options[key]; + } - return new Minimatch(pattern, options).match(p) + return [actualOptions, callback]; } -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) - } +/** + * Generates a new temporary name. 
+ * + * @param {Object} opts + * @returns {string} the new random name according to opts + * @private + */ +function _generateTmpName(opts) { - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } + const tmpDir = opts.tmpdir; - if (!options) options = {} - pattern = pattern.trim() + /* istanbul ignore else */ + if (!_isUndefined(opts.name)) + return path.join(tmpDir, opts.dir, opts.name); - // windows support: need to use /, not \ - if (path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } + /* istanbul ignore else */ + if (!_isUndefined(opts.template)) + return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false + // prefix and postfix + const name = [ + opts.prefix ? opts.prefix : 'tmp', + '-', + process.pid, + '-', + _randomChars(12), + opts.postfix ? '-' + opts.postfix : '' + ].join(''); - // make the set of regexps etc. - this.make() + return path.join(tmpDir, opts.dir, name); } -Minimatch.prototype.debug = function () {} +/** + * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing + * options. + * + * @param {Options} options + * @private + */ +function _assertAndSanitizeOptions(options) { -Minimatch.prototype.make = make -function make () { - // don't do it more than once. - if (this._made) return + options.tmpdir = _getTmpDir(options); - var pattern = this.pattern - var options = this.options + const tmpDir = options.tmpdir; - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return + /* istanbul ignore else */ + if (!_isUndefined(options.name)) + _assertIsRelative(options.name, 'name', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.dir)) + _assertIsRelative(options.dir, 'dir', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.template)) { + _assertIsRelative(options.template, 'template', tmpDir); + if (!options.template.match(TEMPLATE_PATTERN)) + throw new Error(`Invalid template, found "${options.template}".`); } + /* istanbul ignore else */ + if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) + throw new Error(`Invalid tries, found "${options.tries}".`); - // step 1: figure out negation, etc. - this.parseNegate() - - // step 2: expand braces - var set = this.globSet = this.braceExpand() - - if (options.debug) this.debug = console.error + // if a name was specified we will try once + options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1; + options.keep = !!options.keep; + options.detachDescriptor = !!options.detachDescriptor; + options.discardDescriptor = !!options.discardDescriptor; + options.unsafeCleanup = !!options.unsafeCleanup; - this.debug(this.pattern, set) - - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. 
- // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) - - this.debug(this.pattern, set) - - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) - - this.debug(this.pattern, set) - - // filter out everything that didn't compile properly. - set = set.filter(function (s) { - return s.indexOf(false) === -1 - }) - - this.debug(this.pattern, set) + // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); + options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); + // sanitize further if template is relative to options.dir + options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); - this.set = set + // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); + options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; + options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; } -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 - - if (options.nonegate) return - - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' - ; i++) { - negate = !negate - negateOffset++ +/** + * Resolve the specified path name in respect to tmpDir. + * + * The specified name might include relative path components, e.g. ../ + * so we need to resolve in order to be sure that is is located inside tmpDir + * + * @param name + * @param tmpDir + * @returns {string} + * @private + */ +function _resolvePath(name, tmpDir) { + const sanitizedName = _sanitizeName(name); + if (sanitizedName.startsWith(tmpDir)) { + return path.resolve(sanitizedName); + } else { + return path.resolve(path.join(tmpDir, sanitizedName)); } - - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate } -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return braceExpand(pattern, options) +/** + * Sanitize the specified path name by removing all quote characters. + * + * @param name + * @returns {string} + * @private + */ +function _sanitizeName(name) { + if (_isBlank(name)) { + return name; + } + return name.replace(/["']/g, ''); } -Minimatch.prototype.braceExpand = braceExpand - -function braceExpand (pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options - } else { - options = {} +/** + * Asserts whether specified name is relative to the specified tmpDir. 
+ * + * @param {string} name + * @param {string} option + * @param {string} tmpDir + * @throws {Error} + * @private + */ +function _assertIsRelative(name, option, tmpDir) { + if (option === 'name') { + // assert that name is not absolute and does not contain a path + if (path.isAbsolute(name)) + throw new Error(`${option} option must not contain an absolute path, found "${name}".`); + // must not fail on valid . or .. or similar such constructs + let basename = path.basename(name); + if (basename === '..' || basename === '.' || basename !== name) + throw new Error(`${option} option must not contain a path, found "${name}".`); + } + else { // if (option === 'dir' || option === 'template') { + // assert that dir or template are relative to tmpDir + if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { + throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); } + let resolvedPath = _resolvePath(name, tmpDir); + if (!resolvedPath.startsWith(tmpDir)) + throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); } +} - pattern = typeof pattern === 'undefined' - ? this.pattern : pattern - - if (typeof pattern === 'undefined') { - throw new TypeError('undefined pattern') - } +/** + * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isEBADF(error) { + return _isExpectedError(error, -EBADF, 'EBADF'); +} - if (options.nobrace || - !pattern.match(/\{.*\}/)) { - // shortcut. no need to expand. - return [pattern] - } +/** + * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isENOENT(error) { + return _isExpectedError(error, -ENOENT, 'ENOENT'); +} - return expand(pattern) +/** + * Helper to determine whether the expected error code matches the actual code and errno, + * which will differ between the supported node versions. + * + * - Node >= 7.0: + * error.code {string} + * error.errno {number} any numerical value will be negated + * + * CAVEAT + * + * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT + * is no different here. + * + * @param {SystemError} error + * @param {number} errno + * @param {string} code + * @private + */ +function _isExpectedError(error, errno, code) { + return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; } -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - if (pattern.length > 1024 * 64) { - throw new TypeError('pattern is too long') - } +/** + * Sets the graceful cleanup. 
+ * + * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the + * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary + * object removals. + */ +function setGracefulCleanup() { + _gracefulCleanup = true; +} - var options = this.options +/** + * Returns the currently configured tmp dir from os.tmpdir(). + * + * @private + * @param {?Options} options + * @returns {string} the currently configured tmp dir + */ +function _getTmpDir(options) { + return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); +} - // shortcuts - if (!options.noglobstar && pattern === '**') return GLOBSTAR - if (pattern === '') return '' +// Install process exit listener +process.addListener(EXIT, _garbageCollector); - var re = '' - var hasMagic = !!options.nocase - var escaping = false - // ? => one single character - var patternListStack = [] - var negativeLists = [] - var stateChar - var inClass = false - var reClassStart = -1 - var classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - var patternStart = pattern.charAt(0) === '.' ? '' // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' - : '(?!\\.)' - var self = this +/** + * Configuration options. + * + * @typedef {Object} Options + * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected + * @property {?number} tries the number of tries before give up the name generation + * @property (?int) mode the access mode, defaults are 0o700 for directories and 0o600 for files + * @property {?string} template the "mkstemp" like filename template + * @property {?string} name fixed name relative to tmpdir or the specified dir option + * @property {?string} dir tmp directory relative to the root tmp directory in use + * @property {?string} prefix prefix for the generated name + * @property {?string} postfix postfix for the generated name + * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir + * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty + * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection + * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection + */ - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - self.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } - - for (var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) - - // skip over any that are escaped. - if (escaping && reSpecials[c]) { - re += '\\' + c - escaping = false - continue - } - - switch (c) { - case '/': - // completely not allowed, even escaped. - // Should already be path-split by now. 
- return false - - case '\\': - clearStateChar() - escaping = true - continue +/** + * @typedef {Object} FileSyncObject + * @property {string} name the name of the file + * @property {string} fd the file descriptor or -1 if the fd has been discarded + * @property {fileCallback} removeCallback the callback function to remove the file + */ - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) +/** + * @typedef {Object} DirSyncObject + * @property {string} name the name of the directory + * @property {fileCallback} removeCallback the callback function to remove the directory + */ - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } +/** + * @callback tmpNameCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + */ - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - self.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue +/** + * @callback fileCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd had been discarded + * @param {cleanupCallback} fn the cleanup callback function + */ - case '(': - if (inClass) { - re += '(' - continue - } +/** + * @callback fileCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd had been discarded + * @param {cleanupCallbackSync} fn the cleanup callback function + */ - if (!stateChar) { - re += '\\(' - continue - } +/** + * @callback dirCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallback} fn the cleanup callback function + */ - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }) - // negation is (?:(?!js)[^/]*) - re += stateChar === '!' ? '(?:(?!(?:' : '(?:' - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue +/** + * @callback dirCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallbackSync} fn the cleanup callback function + */ - case ')': - if (inClass || !patternListStack.length) { - re += '\\)' - continue - } +/** + * Removes the temporary created file or directory. + * + * @callback cleanupCallback + * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed + */ - clearStateChar() - hasMagic = true - var pl = patternListStack.pop() - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(pl) - } - pl.reEnd = re.length - continue +/** + * Removes the temporary created file or directory. 
+ * + * @callback cleanupCallbackSync + */ - case '|': - if (inClass || !patternListStack.length || escaping) { - re += '\\|' - escaping = false - continue - } +/** + * Callback function for function composition. + * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} + * + * @callback simpleCallback + */ - clearStateChar() - re += '|' - continue +// exporting all the needed methods - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() +// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will +// allow users to reconfigure the temporary directory +Object.defineProperty(module.exports, 'tmpdir', { + enumerable: true, + configurable: false, + get: function () { + return _getTmpDir(); + } +}); - if (inClass) { - re += '\\' + c - continue - } +module.exports.dir = dir; +module.exports.dirSync = dirSync; - inClass = true - classStart = i - reClassStart = re.length - re += c - continue +module.exports.file = file; +module.exports.fileSync = fileSync; - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - escaping = false - continue - } +module.exports.tmpName = tmpName; +module.exports.tmpNameSync = tmpNameSync; - // handle the case where we left a class open. - // "[z-a]" is valid, equivalent to "\[z-a\]" - if (inClass) { - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - var cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + cs + ']') - } catch (er) { - // not a valid class! - var sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' - hasMagic = hasMagic || sp[1] - inClass = false - continue - } - } +module.exports.setGracefulCleanup = setGracefulCleanup; - // finish up the class. - hasMagic = true - inClass = false - re += c - continue - default: - // swallow any state char that wasn't consumed - clearStateChar() +/***/ }), - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === '^' && inClass)) { - re += '\\' - } +/***/ 30: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - re += c +"use strict"; - } // switch - } // for +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getUploadSpecification = void 0; +const fs = __importStar(__webpack_require__(747)); +const core_1 = __webpack_require__(676); +const path_1 = __webpack_require__(622); +const path_and_artifact_name_validation_1 = __webpack_require__(547); +/** + * Creates a specification that describes how each file that is part of the artifact will be uploaded + * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server + * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file + * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + */ +function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { + // artifact name was checked earlier on, no need to check again + const specifications = []; + if (!fs.existsSync(rootDirectory)) { + throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); + } + if (!fs.lstatSync(rootDirectory).isDirectory()) { + throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); + } + // Normalize and resolve, this allows for either absolute or relative paths to be used + rootDirectory = path_1.normalize(rootDirectory); + rootDirectory = path_1.resolve(rootDirectory); + /* + Example to demonstrate behavior + + Input: + artifactName: my-artifact + rootDirectory: '/home/user/files/plz-upload' + artifactFiles: [ + '/home/user/files/plz-upload/file1.txt', + '/home/user/files/plz-upload/file2.txt', + '/home/user/files/plz-upload/dir/file3.txt' + ] + + Output: + specifications: [ + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] + ] + */ + for (let file of artifactFiles) { + if (!fs.existsSync(file)) { + throw new Error(`File ${file} does not exist`); + } + if (!fs.lstatSync(file).isDirectory()) { + // Normalize and resolve, this allows for either absolute or relative paths to be used + file = path_1.normalize(file); + file = path_1.resolve(file); + if (!file.startsWith(rootDirectory)) { + throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); + } + // Check for forbidden characters in file paths that will be rejected during upload + const uploadPath = file.replace(rootDirectory, ''); + path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath); + /* + uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all + be saved in the same container. 
The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts + + path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt + join('artifact-name/', 'file-to-upload.txt') + join('artifact-name/', '/file-to-upload.txt') + join('artifact-name', 'file-to-upload.txt') + join('artifact-name', '/file-to-upload.txt') + */ + specifications.push({ + absoluteFilePath: file, + uploadFilePath: path_1.join(artifactName, uploadPath) + }); + } + else { + // Directories are rejected by the server during upload + core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); + } + } + return specifications; +} +exports.getUploadSpecification = getUploadSpecification; +//# sourceMappingURL=upload-specification.js.map - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.substr(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } +/***/ }), - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } +/***/ 50: +/***/ (function(module) { - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) - this.debug('tail=%j\n %s', tail, tail, pl, re) - var t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) - // handle trailing things that only matter at the very end. 
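// [Editorial sketch, not part of the patch] A hypothetical caller of the
// getUploadSpecification helper shown in the hunk above; the require path and
// the file locations are assumptions for illustration only.
const { getUploadSpecification } = require('@actions/artifact/lib/internal/upload-specification') // assumed path
const spec = getUploadSpecification(
  'my-artifact',
  '/home/user/files/plz-upload',
  [
    '/home/user/files/plz-upload/file1.txt',
    '/home/user/files/plz-upload/dir/file3.txt'
  ]
)
// spec[1] -> { absoluteFilePath: '/home/user/files/plz-upload/dir/file3.txt',
//              uploadFilePath:   'my-artifact/dir/file3.txt' }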
- clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } + return wrapper - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - var addPatternStart = false - switch (re.charAt(0)) { - case '.': - case '[': - case '(': addPatternStart = true + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret } +} - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n] - - var nlBefore = re.slice(0, nl.reStart) - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) - var nlAfter = re.slice(nl.reEnd) - nlLast += nlAfter +/***/ }), - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. - var openParensBefore = nlBefore.split('(').length - 1 - var cleanAfter = nlAfter - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter +/***/ 51: +/***/ (function(module, __unusedexports, __webpack_require__) { - var dollar = '' - if (nlAfter === '' && isSub !== SUBPARSE) { - dollar = '$' - } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast - re = newRe - } +module.exports = globSync +globSync.GlobSync = GlobSync - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } +var fs = __webpack_require__(747) +var rp = __webpack_require__(909) +var minimatch = __webpack_require__(727) +var Minimatch = minimatch.Minimatch +var Glob = __webpack_require__(93).Glob +var util = __webpack_require__(669) +var path = __webpack_require__(622) +var assert = __webpack_require__(357) +var isAbsolute = __webpack_require__(409) +var common = __webpack_require__(459) +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored - if (addPatternStart) { - re = patternStart + re - } +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } + return new GlobSync(pattern, options).found +} - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') - var flags = options.nocase ? 
'i' : '' - try { - var regExp = new RegExp('^' + re + '$', flags) - } catch (er) { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') - } + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') - regExp._glob = pattern - regExp._src = re + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) - return regExp + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() } -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) } -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) - if (!set.length) { - this.regexp = false - return this.regexp + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ } - var options = this.options + // now n is the index of the first one that is *not* a string. - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - var flags = options.nocase ? 'i' : '' + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === 'string') ? regExpEscape(p) - : p._src - }).join('\\\/') - }).join('|') + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' + var remain = pattern.slice(n) - try { - this.regexp = new RegExp(re, flags) - } catch (ex) { - this.regexp = false - } - return this.regexp -} + // get the list of entries. 
+ var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix -minimatch.match = function (list, pattern, options) { - options = options || {} - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) } -Minimatch.prototype.match = match -function match (f, partial) { - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - if (f === '/' && partial) return true +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) - var options = this.options + // if the abs isn't a dir, then nothing can match! + if (!entries) + return - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } } - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. 
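// [Editorial sketch, not part of the patch] The prefix/remain split performed
// by _process above, shown on a throwaway example. GLOBSTAR stands in for the
// real minimatch marker and the compiled pattern is an assumption for illustration.
const GLOBSTAR = Symbol('globstar')
// a compiled set for something like 'src/**/*.js': leading literal strings, then markers/regexps
const compiled = ['src', GLOBSTAR, /^[^/.][^/]*\.js$/]
let n = 0
while (typeof compiled[n] === 'string') n++                      // count leading literal segments
const prefix = n === 0 ? null : compiled.slice(0, n).join('/')   // 'src' -> the directory to readdir
const remain = compiled.slice(n)                                 // [GLOBSTAR, <regexp>] -> matched per entry
console.log(prefix, remain.length)                               // -> 'src' 2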
- var set = this.set - this.debug(this.pattern, 'set', set) + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) - // Find the basename of the path by looking for the last non-empty segment - var filename - var i - for (i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } - for (i = 0; i < set.length; i++) { - var pattern = set[i] - var file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - var hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) } + // This was the last one, and no stats were needed + return } - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } } -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. -Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options - - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) - - this.debug('matchOne', file.length, pattern.length) - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return - this.debug(pattern, p, f) + var abs = this._makeAbs(e) - // should be impossible. - // some invalid regexp stuff in the set. - if (p === false) return false + if (this.mark) + e = this._mark(e) - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) + if (this.absolute) { + e = abs + } - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' 
|| - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } + if (this.matches[index][e]) + return - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + this.matches[index][e] = true - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' || - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } + if (this.stat) + this._stat(e) +} - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - if (options.nocase) { - hit = f.toLowerCase() === p.toLowerCase() - } else { - hit = f === p - } - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null } - - if (!hit) return false } - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') - return emptyFileEnd - } + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym - // should be unreachable. - throw new Error('wtf?') -} + // If it's not a symlink or a dir, then it's definitely a regular file. 
+ // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, '$1') + return entries } -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') -} +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) -/***/ }), + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null -/***/ 102: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + if (Array.isArray(c)) + return c + } -"use strict"; + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} -// For internal use, subject to change. -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__webpack_require__(747)); -const os = __importStar(__webpack_require__(87)); -const utils_1 = __webpack_require__(82); -function issueCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); -} -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map + } -/***/ }), + this.cache[abs] = entries -/***/ 117: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + // mark and cache dir-ness + return entries +} -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var pathModule = __webpack_require__(622); -var isWindows = process.platform === 'win32'; -var fs = __webpack_require__(747); +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break -// JavaScript implementation of realpath, ported from node pre-v6 + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break -var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} -function rethrow() { - // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and - // is fairly slow to generate. - var callback; - if (DEBUG) { - var backtrace = new Error; - callback = debugCallback; - } else - callback = missingCallback; +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { - return callback; + var entries = this._readdir(abs, inGlobStar) - function debugCallback(err) { - if (err) { - backtrace.message = err.message; - err = backtrace; - missingCallback(err); - } - } + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return - function missingCallback(err) { - if (err) { - if (process.throwDeprecation) - throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs - else if (!process.noDeprecation) { - var msg = 'fs: missing callback ' + (err.stack || err.message); - if (process.traceDeprecation) - console.trace(msg); - else - console.error(msg); - } - } - } -} + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) -function maybeCallback(cb) { - return typeof cb === 'function' ? cb : rethrow(); -} + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) -var normalize = pathModule.normalize; + var len = entries.length + var isSym = this.symlinks[abs] -// Regexp that finds the next partion of a (partial) path -// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] -if (isWindows) { - var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; -} else { - var nextPartRe = /(.*?)(?:[\/]+|$)/g; -} + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return -// Regex to find the device root, including trailing slash. E.g. 'c:\\'. -if (isWindows) { - var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; -} else { - var splitRootRe = /^[\/]*/; -} + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' 
&& !this.dot) + continue -exports.realpathSync = function realpathSync(p, cache) { - // make p is absolute - p = pathModule.resolve(p); + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return cache[p]; + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) } +} - var original = p, - seenLinks = {}, - knownHard = {}; - - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) - start(); + if (!this.matches[index]) + this.matches[index] = Object.create(null) - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; + // If it doesn't exist, then just mark the lack of results + if (!exists) + return - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstatSync(base); - knownHard[base] = true; + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' } } - // walk down the path, swapping out linked pathparts for their real - // values - // NB: p.length changes. - while (pos < p.length) { - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - continue; - } + // Mark this as a match + this._emitMatch(index, prefix) +} - var resolvedLink; - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // some known symbolic link. no need to stat again. - resolvedLink = cache[base]; - } else { - var stat = fs.lstatSync(base); - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - continue; - } +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' - // read the link if it wasn't read before - // dev/ino always return 0 on windows, so skip the check. - var linkTarget = null; - if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - linkTarget = seenLinks[id]; - } - } - if (linkTarget === null) { - fs.statSync(base); - linkTarget = fs.readlinkSync(base); - } - resolvedLink = pathModule.resolve(previous, linkTarget); - // track this, if given a cache. 
- if (cache) cache[base] = resolvedLink; - if (!isWindows) seenLinks[id] = linkTarget; - } + if (f.length > this.maxLength) + return false - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] - if (cache) cache[original] = p; + if (Array.isArray(c)) + c = 'DIR' - return p; -}; + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + if (needDir && c === 'FILE') + return false -exports.realpath = function realpath(p, cache, cb) { - if (typeof cb !== 'function') { - cb = maybeCallback(cache); - cache = null; + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. } - // make p is absolute - p = pathModule.resolve(p); - - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return process.nextTick(cb.bind(null, null, cache[p])); - } - - var original = p, - seenLinks = {}, - knownHard = {}; - - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; - - start(); - - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstat(base, function(err) { - if (err) return cb(err); - knownHard[base] = true; - LOOP(); - }); + if (lstat && lstat.isSymbolicLink()) { + try { + stat = fs.statSync(abs) + } catch (er) { + stat = lstat + } } else { - process.nextTick(LOOP); + stat = lstat } } - // walk down the path, swapping out linked pathparts for their real - // values - function LOOP() { - // stop if scanned past end of path - if (pos >= p.length) { - if (cache) cache[original] = p; - return cb(null, p); - } - - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - return process.nextTick(LOOP); - } + this.statCache[abs] = stat - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // known symbolic link. no need to stat again. - return gotResolvedLink(cache[base]); - } + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' - return fs.lstat(base, gotStat); - } + this.cache[abs] = this.cache[abs] || c - function gotStat(err, stat) { - if (err) return cb(err); + if (needDir && c === 'FILE') + return false - // if not a symlink, skip to the next path part - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - return process.nextTick(LOOP); - } + return c +} - // stat & read the link if not read before - // call gotTarget as soon as the link target is known - // dev/ino always return 0 on windows, so skip the check. 
- if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - return gotTarget(null, seenLinks[id], base); - } - } - fs.stat(base, function(err) { - if (err) return cb(err); +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} - fs.readlink(base, function(err, target) { - if (!isWindows) seenLinks[id] = target; - gotTarget(err, target); - }); - }); - } +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} - function gotTarget(err, target, base) { - if (err) return cb(err); - var resolvedLink = pathModule.resolve(previous, target); - if (cache) cache[base] = resolvedLink; - gotResolvedLink(resolvedLink); - } +/***/ }), - function gotResolvedLink(resolvedLink) { - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } -}; +/***/ 87: +/***/ (function(module) { +module.exports = require("os"); /***/ }), -/***/ 141: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 93: +/***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. 
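// [Editorial sketch, not part of the patch] A stripped-down, hypothetical
// version of the PROCESS(pattern, inGlobStar) walk described in the comment
// above: literal and regexp segments are consumed one directory level at a
// time, while a globstar either matches nothing or swallows one level and
// stays active. Names such as `walk` are illustrative only.
const fs = require('fs')
const path = require('path')
const GLOBSTAR = Symbol('globstar')

function walk (dir, pattern, results) {
  if (pattern.length === 0) { results.add(dir); return }
  let entries
  try { entries = fs.readdirSync(dir) } catch (er) { return }  // not a directory: nothing below can match
  const [head, ...rest] = pattern
  if (head === GLOBSTAR) {
    walk(dir, rest, results)                         // globstar matches the empty sequence
    for (const e of entries) {
      if (e.charAt(0) === '.') continue              // ** never matches dotfiles
      walk(path.join(dir, e), pattern, results)      // swallow one segment, stay in globstar
    }
  } else {
    for (const e of entries) {
      const hit = typeof head === 'string' ? e === head : head.test(e)
      if (hit) walk(path.join(dir, e), rest, results)
    }
  }
}

// e.g. all .js files under src/, at any depth:
// const found = new Set(); walk('.', ['src', GLOBSTAR, /\.js$/], found)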
+module.exports = glob -var net = __webpack_require__(631); -var tls = __webpack_require__(16); -var http = __webpack_require__(605); -var https = __webpack_require__(211); -var events = __webpack_require__(614); -var assert = __webpack_require__(357); -var util = __webpack_require__(669); +var fs = __webpack_require__(747) +var rp = __webpack_require__(909) +var minimatch = __webpack_require__(727) +var Minimatch = minimatch.Minimatch +var inherits = __webpack_require__(919) +var EE = __webpack_require__(614).EventEmitter +var path = __webpack_require__(622) +var assert = __webpack_require__(357) +var isAbsolute = __webpack_require__(409) +var globSync = __webpack_require__(51) +var common = __webpack_require__(459) +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = __webpack_require__(691) +var util = __webpack_require__(669) +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored +var once = __webpack_require__(731) -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; + return new Glob(pattern, options, cb) } -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} +// old api surface +glob.glob = glob -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin } +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. 
- self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); - } + var g = new Glob(pattern, options) + var set = g.minimatch.set - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; + if (!pattern) + return false -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); + if (set.length > 1) + return true - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true } - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + return false +} - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null } - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) } - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } + setopts(this, pattern, options) + this._didRealPath = false - function onError(cause) { - connectReq.removeAllListeners(); + // process each pattern in the minimatch set + var n = this.minimatch.set.length - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. + this.matches = new Array(n) -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) } - this.sockets.splice(pos, 1); - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; + var self = this + this._processing = 0 -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host - }); + this._emitQueue = [] + this._processQueue = [] + this.paused = false - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} + if (this.noprocess) + return this + if (n === 0) + return done() -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) } - return host; // for v0.11 or later -} + sync = false -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() } } } - return target; } +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test + if (this.realpath && !this._didRealpath) + return this._realpath() + common.finish(this) + this.emit('end', this.found) +} -/***/ }), - -/***/ 150: -/***/ (function(module, __unusedexports, __webpack_require__) { +Glob.prototype._realpath = function () { + if (this._didRealpath) + return -/*! - * Tmp - * - * Copyright (c) 2011-2017 KARASZI Istvan - * - * MIT Licensed - */ + this._didRealpath = true -/* - * Module dependencies. - */ -const fs = __webpack_require__(747); -const os = __webpack_require__(87); -const path = __webpack_require__(622); -const crypto = __webpack_require__(417); -const _c = { fs: fs.constants, os: os.constants }; -const rimraf = __webpack_require__(13); + var n = this.matches.length + if (n === 0) + return this._finish() -/* - * The working inner variables. 
- */ -const - // the random characters to choose from - RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) - TEMPLATE_PATTERN = /XXXXXX/, + function next () { + if (--n === 0) + self._finish() + } +} - DEFAULT_TRIES = 3, +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() - CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), + var found = Object.keys(matchset) + var self = this + var n = found.length - // constants are off on the windows platform and will not match the actual errno codes - IS_WIN32 = os.platform() === 'win32', - EBADF = _c.EBADF || _c.os.errno.EBADF, - ENOENT = _c.ENOENT || _c.os.errno.ENOENT, + if (n === 0) + return cb() - DIR_MODE = 0o700 /* 448 */, - FILE_MODE = 0o600 /* 384 */, + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. + p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here - EXIT = 'exit', + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} - // this will hold the objects need to be removed on exit - _removeObjects = [], +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} - // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback - FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), - FN_RIMRAF_SYNC = rimraf.sync; +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} -let - _gracefulCleanup = false; +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} -/** - * Gets a temporary file name. 
- * - * @param {(Options|tmpNameCallback)} options options or callback - * @param {?tmpNameCallback} callback the callback function - */ -function tmpName(options, callback) { - const - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} - try { - _assertAndSanitizeOptions(opts); - } catch (err) { - return cb(err); +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } } +} - let tries = opts.tries; - (function _getUniqueName() { - try { - const name = _generateTmpName(opts); +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') - // check whether the path exists then retry if needed - fs.stat(name, function (err) { - /* istanbul ignore else */ - if (!err) { - /* istanbul ignore else */ - if (tries-- > 0) return _getUniqueName(); + if (this.aborted) + return - return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); - } + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } - cb(null, name); - }); - } catch (err) { - cb(err); - } - }()); -} + //console.error('PROCESS %d', this._processing, pattern) -/** - * Synchronous version of tmpName. - * - * @param {Object} options - * @returns {string} the generated random name - * @throws {Error} if the options are invalid or could not generate a filename - */ -function tmpNameSync(options) { - const - args = _parseArguments(options), - opts = args[0]; + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. - _assertAndSanitizeOptions(opts); + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return - let tries = opts.tries; - do { - const name = _generateTmpName(opts); - try { - fs.statSync(name); - } catch (e) { - return name; - } - } while (tries-- > 0); + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break - throw new Error('Could not get a unique tmp filename, max tries reached'); -} + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } -/** - * Creates and opens a temporary file. 
- * - * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined - * @param {?fileCallback} callback - */ -function file(options, callback) { - const - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; + var remain = pattern.slice(n) - // gets a temporary filename - tmpName(opts, function _tmpNameCreated(err, name) { - /* istanbul ignore else */ - if (err) return cb(err); + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix - // create and open the file - fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { - /* istanbu ignore else */ - if (err) return cb(err); + var abs = this._makeAbs(read) - if (opts.discardDescriptor) { - return fs.close(fd, function _discardCallback(possibleErr) { - // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only - return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); - }); - } else { - // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care - // about the descriptor - const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; - cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false)); - } - }); - }); -} - -/** - * Synchronous version of file. - * - * @param {Options} options - * @returns {FileSyncObject} object consists of name, fd and removeCallback - * @throws {Error} if cannot create a file - */ -function fileSync(options) { - const - args = _parseArguments(options), - opts = args[0]; + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() - const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; - const name = tmpNameSync(opts); - var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); - /* istanbul ignore else */ - if (opts.discardDescriptor) { - fs.closeSync(fd); - fd = undefined; - } + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} - return { - name: name, - fd: fd, - removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) - }; +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) } -/** - * Creates a temporary directory. - * - * @param {(Options|dirCallback)} options the options or the callback function - * @param {?dirCallback} callback - */ -function dir(options, callback) { - const - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - // gets a temporary filename - tmpName(opts, function _tmpNameCreated(err, name) { - /* istanbul ignore else */ - if (err) return cb(err); + // if the abs isn't a dir, then nothing can match! 
+ if (!entries) + return cb() - // create the directory - fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { - /* istanbul ignore else */ - if (err) return cb(err); + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' - cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); - }); - }); -} + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } -/** - * Synchronous version of dir. - * - * @param {Options} options - * @returns {DirSyncObject} object consists of name and removeCallback - * @throws {Error} if it cannot create a directory - */ -function dirSync(options) { - const - args = _parseArguments(options), - opts = args[0]; + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) - const name = tmpNameSync(opts); - fs.mkdirSync(name, opts.mode || DIR_MODE); + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() - return { - name: name, - removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) - }; -} + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. -/** - * Removes files asynchronously. - * - * @param {Object} fdPath - * @param {Function} next - * @private - */ -function _removeFileAsync(fdPath, next) { - const _handler = function (err) { - if (err && !_isENOENT(err)) { - // reraise any unanticipated error - return next(err); - } - next(); - }; + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) - if (0 <= fdPath[0]) - fs.close(fdPath[0], function () { - fs.unlink(fdPath[1], _handler); - }); - else fs.unlink(fdPath[1], _handler); -} + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } -/** - * Removes files synchronously. - * - * @param {Object} fdPath - * @private - */ -function _removeFileSync(fdPath) { - let rethrownException = null; - try { - if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); - } catch (e) { - // reraise any unanticipated error - if (!_isEBADF(e) && !_isENOENT(e)) throw e; - } finally { - try { - fs.unlinkSync(fdPath[1]); - } - catch (e) { - // reraise any unanticipated error - if (!_isENOENT(e)) rethrownException = e; + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) } + // This was the last one, and no stats were needed + return cb() } - if (rethrownException !== null) { - throw rethrownException; + + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) } + cb() } -/** - * Prepares the callback for removal of the temporary file. 
- * - * Returns either a sync callback or a async callback depending on whether - * fileSync or file was called, which is expressed by the sync parameter. - * - * @param {string} name the path of the file - * @param {number} fd file descriptor - * @param {Object} opts - * @param {boolean} sync - * @returns {fileCallback | fileCallbackSync} - * @private - */ -function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { - const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); - const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return - if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + if (isIgnored(this, e)) + return - return sync ? removeCallbackSync : removeCallback; -} + if (this.paused) { + this._emitQueue.push([index, e]) + return + } -/** - * Prepares the callback for removal of the temporary directory. - * - * Returns either a sync callback or a async callback depending on whether - * tmpFileSync or tmpFile was called, which is expressed by the sync parameter. - * - * @param {string} name - * @param {Object} opts - * @param {boolean} sync - * @returns {Function} the callback - * @private - */ -function _prepareTmpDirRemoveCallback(name, opts, sync) { - const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs); - const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; - const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); - const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); - if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + var abs = isAbsolute(e) ? e : this._makeAbs(e) - return sync ? removeCallbackSync : removeCallback; -} + if (this.mark) + e = this._mark(e) -/** - * Creates a guarded function wrapping the removeFunction call. - * - * The cleanup callback is save to be called multiple times. - * Subsequent invocations will be ignored. - * - * @param {Function} removeFunction - * @param {string} fileOrDirName - * @param {boolean} sync - * @param {cleanupCallbackSync?} cleanupCallbackSync - * @returns {cleanupCallback | cleanupCallbackSync} - * @private - */ -function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { - let called = false; + if (this.absolute) + e = abs - // if sync is true, the next parameter will be ignored - return function _cleanupCallback(next) { + if (this.matches[index][e]) + return - /* istanbul ignore else */ - if (!called) { - // remove cleanupCallback from cache - const toRemove = cleanupCallbackSync || _cleanupCallback; - const index = _removeObjects.indexOf(toRemove); - /* istanbul ignore else */ - if (index >= 0) _removeObjects.splice(index, 1); + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } - called = true; - if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { - return removeFunction(fileOrDirName); - } else { - return removeFunction(fileOrDirName, next || function() {}); - } - } - }; + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) } -/** - * The garbage collector. 
- * - * @private - */ -function _garbageCollector() { - /* istanbul ignore else */ - if (!_gracefulCleanup) return; +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return - // the function being called removes itself from _removeObjects, - // loop until _removeObjects is empty - while (_removeObjects.length) { - try { - _removeObjects[0](); - } catch (e) { - // already removed? - } + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) } } -/** - * Random name generator based on crypto. - * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript - * - * @param {number} howMany - * @returns {string} the generated random name - * @private - */ -function _randomChars(howMany) { - let - value = [], - rnd = null; +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return - // make sure that we do not fail because we ran out of entropy - try { - rnd = crypto.randomBytes(howMany); - } catch (e) { - rnd = crypto.pseudoRandomBytes(howMany); - } + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return - for (var i = 0; i < howMany; i++) { - value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); - } + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) - return value.join(''); -} + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() -/** - * Helper which determines whether a string s is blank, that is undefined, or empty or null. - * - * @private - * @param {string} s - * @returns {Boolean} true whether the string s is blank, false otherwise - */ -function _isBlank(s) { - return s === null || _isUndefined(s) || !s.trim(); -} + if (Array.isArray(c)) + return cb(null, c) + } -/** - * Checks whether the `obj` parameter is defined or not. - * - * @param {Object} obj - * @returns {boolean} true if the object is undefined - * @private - */ -function _isUndefined(obj) { - return typeof obj === 'undefined'; + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) } -/** - * Parses the function arguments. - * - * This function helps to have optional arguments. 
- * - * @param {(Options|null|undefined|Function)} options - * @param {?Function} callback - * @returns {Array} parsed arguments - * @private - */ -function _parseArguments(options, callback) { - /* istanbul ignore else */ - if (typeof options === 'function') { - return [{}, options]; +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) } +} - /* istanbul ignore else */ - if (_isUndefined(options)) { - return [{}, callback]; - } +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return - // copy options so we do not leak the changes we make internally - const actualOptions = {}; - for (const key of Object.getOwnPropertyNames(options)) { - actualOptions[key] = options[key]; + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } } - return [actualOptions, callback]; + this.cache[abs] = entries + return cb(null, entries) } -/** - * Generates a new temporary name. - * - * @param {Object} opts - * @returns {string} the new random name according to opts - * @private - */ -function _generateTmpName(opts) { +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return - const tmpDir = opts.tmpdir; + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break - /* istanbul ignore else */ - if (!_isUndefined(opts.name)) - return path.join(tmpDir, opts.dir, opts.name); + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break - /* istanbul ignore else */ - if (!_isUndefined(opts.template)) - return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } - // prefix and postfix - const name = [ - opts.prefix ? opts.prefix : 'tmp', - '-', - process.pid, - '-', - _randomChars(12), - opts.postfix ? '-' + opts.postfix : '' - ].join(''); + return cb() +} - return path.join(tmpDir, opts.dir, name); +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) } -/** - * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing - * options. 
- * - * @param {Options} options - * @private - */ -function _assertAndSanitizeOptions(options) { - options.tmpdir = _getTmpDir(options); +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) - const tmpDir = options.tmpdir; + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() - /* istanbul ignore else */ - if (!_isUndefined(options.name)) - _assertIsRelative(options.name, 'name', tmpDir); - /* istanbul ignore else */ - if (!_isUndefined(options.dir)) - _assertIsRelative(options.dir, 'dir', tmpDir); - /* istanbul ignore else */ - if (!_isUndefined(options.template)) { - _assertIsRelative(options.template, 'template', tmpDir); - if (!options.template.match(TEMPLATE_PATTERN)) - throw new Error(`Invalid template, found "${options.template}".`); - } - /* istanbul ignore else */ - if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) - throw new Error(`Invalid tries, found "${options.tries}".`); + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) - // if a name was specified we will try once - options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1; - options.keep = !!options.keep; - options.detachDescriptor = !!options.detachDescriptor; - options.discardDescriptor = !!options.discardDescriptor; - options.unsafeCleanup = !!options.unsafeCleanup; + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) - // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to - options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); - options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); - // sanitize further if template is relative to options.dir - options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); + var isSym = this.symlinks[abs] + var len = entries.length - // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to - options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); - options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; - options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; -} + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() -/** - * Resolve the specified path name in respect to tmpDir. - * - * The specified name might include relative path components, e.g. ../ - * so we need to resolve in order to be sure that is is located inside tmpDir - * - * @param name - * @param tmpDir - * @returns {string} - * @private - */ -function _resolvePath(name, tmpDir) { - const sanitizedName = _sanitizeName(name); - if (sanitizedName.startsWith(tmpDir)) { - return path.resolve(sanitizedName); - } else { - return path.resolve(path.join(tmpDir, sanitizedName)); + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' 
&& !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) } + + cb() } -/** - * Sanitize the specified path name by removing all quote characters. - * - * @param name - * @returns {string} - * @private - */ -function _sanitizeName(name) { - if (_isBlank(name)) { - return name; - } - return name.replace(/["']/g, ''); +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) } +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { -/** - * Asserts whether specified name is relative to the specified tmpDir. - * - * @param {string} name - * @param {string} option - * @param {string} tmpDir - * @throws {Error} - * @private - */ -function _assertIsRelative(name, option, tmpDir) { - if (option === 'name') { - // assert that name is not absolute and does not contain a path - if (path.isAbsolute(name)) - throw new Error(`${option} option must not contain an absolute path, found "${name}".`); - // must not fail on valid . or .. or similar such constructs - let basename = path.basename(name); - if (basename === '..' || basename === '.' || basename !== name) - throw new Error(`${option} option must not contain a path, found "${name}".`); - } - else { // if (option === 'dir' || option === 'template') { - // assert that dir or template are relative to tmpDir - if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { - throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' } - let resolvedPath = _resolvePath(name, tmpDir); - if (!resolvedPath.startsWith(tmpDir)) - throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); } -} -/** - * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. - * - * @private - */ -function _isEBADF(error) { - return _isExpectedError(error, -EBADF, 'EBADF'); -} + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') -/** - * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. - * - * @private - */ -function _isENOENT(error) { - return _isExpectedError(error, -ENOENT, 'ENOENT'); + // Mark this as a match + this._emitMatch(index, prefix) + cb() } -/** - * Helper to determine whether the expected error code matches the actual code and errno, - * which will differ between the supported node versions. - * - * - Node >= 7.0: - * error.code {string} - * error.errno {number} any numerical value will be negated - * - * CAVEAT - * - * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT - * is no different here. 
- * - * @param {SystemError} error - * @param {number} errno - * @param {string} code - * @private - */ -function _isExpectedError(error, errno, code) { - return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; -} +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' -/** - * Sets the graceful cleanup. - * - * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the - * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary - * object removals. - */ -function setGracefulCleanup() { - _gracefulCleanup = true; -} + if (f.length > this.maxLength) + return cb() -/** - * Returns the currently configured tmp dir from os.tmpdir(). - * - * @private - * @param {?Options} options - * @returns {string} the currently configured tmp dir - */ -function _getTmpDir(options) { - return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); -} + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] -// Install process exit listener -process.addListener(EXIT, _garbageCollector); + if (Array.isArray(c)) + c = 'DIR' -/** - * Configuration options. - * - * @typedef {Object} Options - * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected - * @property {?number} tries the number of tries before give up the name generation - * @property (?int) mode the access mode, defaults are 0o700 for directories and 0o600 for files - * @property {?string} template the "mkstemp" like filename template - * @property {?string} name fixed name relative to tmpdir or the specified dir option - * @property {?string} dir tmp directory relative to the root tmp directory in use - * @property {?string} prefix prefix for the generated name - * @property {?string} postfix postfix for the generated name - * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir - * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty - * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection - * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection - */ + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) -/** - * @typedef {Object} FileSyncObject - * @property {string} name the name of the file - * @property {string} fd the file descriptor or -1 if the fd has been discarded - * @property {fileCallback} removeCallback the callback function to remove the file - */ + if (needDir && c === 'FILE') + return cb() -/** - * @typedef {Object} DirSyncObject - * @property {string} name the name of the directory - * @property {fileCallback} removeCallback the callback function to remove the directory - */ - -/** - * @callback tmpNameCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - */ - -/** - * @callback fileCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {number} fd the file descriptor or -1 if the fd had been discarded - * @param {cleanupCallback} fn the 
cleanup callback function - */ - -/** - * @callback fileCallbackSync - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {number} fd the file descriptor or -1 if the fd had been discarded - * @param {cleanupCallbackSync} fn the cleanup callback function - */ - -/** - * @callback dirCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {cleanupCallback} fn the cleanup callback function - */ - -/** - * @callback dirCallbackSync - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {cleanupCallbackSync} fn the cleanup callback function - */ - -/** - * Removes the temporary created file or directory. - * - * @callback cleanupCallback - * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed - */ + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } -/** - * Removes the temporary created file or directory. - * - * @callback cleanupCallbackSync - */ + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } -/** - * Callback function for function composition. - * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} - * - * @callback simpleCallback - */ + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) -// exporting all the needed methods + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} -// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will -// allow users to reconfigure the temporary directory -Object.defineProperty(module.exports, 'tmpdir', { - enumerable: true, - configurable: false, - get: function () { - return _getTmpDir(); +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() } -}); -module.exports.dir = dir; -module.exports.dirSync = dirSync; + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat -module.exports.file = file; -module.exports.fileSync = fileSync; + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) -module.exports.tmpName = tmpName; -module.exports.tmpNameSync = tmpNameSync; + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c -module.exports.setGracefulCleanup = setGracefulCleanup; + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} /***/ }), -/***/ 176: +/***/ 128: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; Object.defineProperty(exports, "__esModule", { value: true }); -exports.StatusReporter = void 0; -const core_1 = __webpack_require__(470); +exports.digestForStream = exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0; +const crypto_1 = __importDefault(__webpack_require__(417)); +const fs_1 = __webpack_require__(747); +const core_1 = __webpack_require__(676); +const http_client_1 = __webpack_require__(582); +const auth_1 = __webpack_require__(479); +const config_variables_1 = __webpack_require__(750); +const crc64_1 = __importDefault(__webpack_require__(335)); /** - * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded - * - * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable - * The total status of the upload/download gets displayed according to this value - * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function + * Returns a retry time in milliseconds that exponentially gets larger + * depending on the amount of retries that have been attempted */ -class StatusReporter { - constructor(displayFrequencyInMilliseconds) { - this.totalNumberOfFilesToProcess = 0; - this.processedCount = 0; - this.largeFiles = new Map(); - this.totalFileStatus = undefined; - this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; - } - setTotalNumberOfFilesToProcess(fileTotal) { - this.totalNumberOfFilesToProcess = fileTotal; - this.processedCount = 0; - } - start() { - // displays information about the total upload/download status - this.totalFileStatus = setInterval(() => { - // display 1 decimal place without any rounding - const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); - core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); - }, this.displayFrequencyInMilliseconds); - } - // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload - updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) { - // display 1 decimal place without any rounding 
- const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize); - core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); - } - stop() { - if (this.totalFileStatus) { - clearInterval(this.totalFileStatus); - } +function getExponentialRetryTimeInMilliseconds(retryCount) { + if (retryCount < 0) { + throw new Error('RetryCount should not be negative'); } - incrementProcessedCount() { - this.processedCount++; + else if (retryCount === 0) { + return config_variables_1.getInitialRetryIntervalInMilliseconds(); } - formatPercentage(numerator, denominator) { - // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed - return ((numerator / denominator) * 100).toFixed(4).toString(); + const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount; + const maxTime = minTime * config_variables_1.getRetryMultiplier(); + // returns a random number between the minTime (inclusive) and the maxTime (exclusive) + return Math.trunc(Math.random() * (maxTime - minTime) + minTime); +} +exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds; +/** + * Parses a env variable that is a number + */ +function parseEnvNumber(key) { + const value = Number(process.env[key]); + if (Number.isNaN(value) || value < 0) { + return undefined; } + return value; } -exports.StatusReporter = StatusReporter; -//# sourceMappingURL=status-reporter.js.map - -/***/ }), - -/***/ 211: -/***/ (function(module) { - -module.exports = require("https"); - -/***/ }), - -/***/ 214: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -exports.create = void 0; -const artifact_client_1 = __webpack_require__(359); +exports.parseEnvNumber = parseEnvNumber; /** - * Constructs an ArtifactClient + * Various utility functions to help with the necessary API calls */ -function create() { - return artifact_client_1.DefaultArtifactClient.create(); +function getApiVersion() { + return '6.0-preview'; } -exports.create = create; -//# sourceMappingURL=artifact-client.js.map - -/***/ }), - -/***/ 226: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + - Buffer.from(this.username + ':' + this.password).toString('base64'); - } - // This handler cannot handle 401 - canHandleAuthentication(response) { +exports.getApiVersion = getApiVersion; +function isSuccessStatusCode(statusCode) { + if (!statusCode) { return false; } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } + return statusCode >= 200 && statusCode < 300; } -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = 'Bearer ' + this.token; +exports.isSuccessStatusCode = isSuccessStatusCode; +function isForbiddenStatusCode(statusCode) { + if (!statusCode) { + return false; } - // This handler cannot handle 401 - 
canHandleAuthentication(response) { + return statusCode === http_client_1.HttpCodes.Forbidden; +} +exports.isForbiddenStatusCode = isForbiddenStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { return false; } - handleAuthentication(httpClient, requestInfo, objs) { - return null; + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.GatewayTimeout, + http_client_1.HttpCodes.InternalServerError, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.TooManyRequests, + 413 // Payload Too Large + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function isThrottledStatusCode(statusCode) { + if (!statusCode) { + return false; } + return statusCode === http_client_1.HttpCodes.TooManyRequests; } -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; +exports.isThrottledStatusCode = isThrottledStatusCode; +/** + * Attempts to get the retry-after value from a set of http headers. The retry time + * is originally denoted in seconds, so if present, it is converted to milliseconds + * @param headers all the headers received when making an http call + */ +function tryGetRetryAfterValueTimeInMilliseconds(headers) { + if (headers['retry-after']) { + const retryTime = Number(headers['retry-after']); + if (!isNaN(retryTime)) { + core_1.info(`Retry-After header is present with a value of ${retryTime}`); + return retryTime * 1000; + } + core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`); + return undefined; } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + core_1.info(`No retry-after header was found. 
Dumping all headers for diagnostic purposes`); + // eslint-disable-next-line no-console + console.log(headers); + return undefined; +} +exports.tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds; +function getContentRange(start, end, total) { + // Format: `bytes start-end/fileSize + // start and end are inclusive + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/200 + return `bytes ${start}-${end}/${total}`; +} +exports.getContentRange = getContentRange; +/** + * Sets all the necessary headers when downloading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} acceptGzip can we accept a gzip encoded response + * @param {string} acceptType the type of content that we can accept + * @returns appropriate headers to make a specific http call during artifact download + */ +function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) { + const requestOptions = {}; + if (contentType) { + requestOptions['Content-Type'] = contentType; } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; + if (isKeepAlive) { + requestOptions['Connection'] = 'Keep-Alive'; + // keep alive for at least 10 seconds before closing the connection + requestOptions['Keep-Alive'] = '10'; + } + if (acceptGzip) { + // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header + requestOptions['Accept-Encoding'] = 'gzip'; + requestOptions['Accept'] = `application/octet-stream;api-version=${getApiVersion()}`; } - handleAuthentication(httpClient, requestInfo, objs) { - return null; + else { + // default to application/json if we are not working with gzip content + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; } + return requestOptions; } -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; - - -/***/ }), - -/***/ 245: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = globSync -globSync.GlobSync = GlobSync - -var fs = __webpack_require__(747) -var rp = __webpack_require__(302) -var minimatch = __webpack_require__(93) -var Minimatch = minimatch.Minimatch -var Glob = __webpack_require__(402).Glob -var util = __webpack_require__(669) -var path = __webpack_require__(622) -var assert = __webpack_require__(357) -var isAbsolute = __webpack_require__(681) -var common = __webpack_require__(856) -var alphasort = common.alphasort -var alphasorti = common.alphasorti -var setopts = common.setopts -var ownProp = common.ownProp -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -function globSync (pattern, options) { - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - return new GlobSync(pattern, options).found +exports.getDownloadHeaders = getDownloadHeaders; +/** + * Sets all the necessary headers when uploading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} isGzip is the connection being used to upload GZip compressed content + * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed + * @param 
{number} contentLength the length of the content that is being uploaded + * @param {string} contentRange the range of the content that is being uploaded + * @returns appropriate headers to make a specific http call during artifact upload + */ +function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange, digest) { + const requestOptions = {}; + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; + if (contentType) { + requestOptions['Content-Type'] = contentType; + } + if (isKeepAlive) { + requestOptions['Connection'] = 'Keep-Alive'; + // keep alive for at least 10 seconds before closing the connection + requestOptions['Keep-Alive'] = '10'; + } + if (isGzip) { + requestOptions['Content-Encoding'] = 'gzip'; + requestOptions['x-tfs-filelength'] = uncompressedLength; + } + if (contentLength) { + requestOptions['Content-Length'] = contentLength; + } + if (contentRange) { + requestOptions['Content-Range'] = contentRange; + } + if (digest) { + requestOptions['x-actions-results-crc64'] = digest.crc64; + requestOptions['x-actions-results-md5'] = digest.md5; + } + return requestOptions; } +exports.getUploadHeaders = getUploadHeaders; +function createHttpClient(userAgent) { + return new http_client_1.HttpClient(userAgent, [ + new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken()) + ]); +} +exports.createHttpClient = createHttpClient; +function getArtifactUrl() { + const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`; + core_1.debug(`Artifact Url: ${artifactUrl}`); + return artifactUrl; +} +exports.getArtifactUrl = getArtifactUrl; +/** + * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information + * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but + * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only + * the information that we want. + * + * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided. + * Other information such as the headers, the response code and message might be useful, so this is displayed. 
+ */ +function displayHttpDiagnostics(response) { + core_1.info(`##### Begin Diagnostic HTTP information ##### +Status Code: ${response.message.statusCode} +Status Message: ${response.message.statusMessage} +Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} +###### End Diagnostic HTTP information ######`); +} +exports.displayHttpDiagnostics = displayHttpDiagnostics; +function createDirectoriesForArtifact(directories) { + return __awaiter(this, void 0, void 0, function* () { + for (const directory of directories) { + yield fs_1.promises.mkdir(directory, { + recursive: true + }); + } + }); +} +exports.createDirectoriesForArtifact = createDirectoriesForArtifact; +function createEmptyFilesForArtifact(emptyFilesToCreate) { + return __awaiter(this, void 0, void 0, function* () { + for (const filePath of emptyFilesToCreate) { + yield (yield fs_1.promises.open(filePath, 'w')).close(); + } + }); +} +exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact; +function getFileSize(filePath) { + return __awaiter(this, void 0, void 0, function* () { + const stats = yield fs_1.promises.stat(filePath); + core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`); + return stats.size; + }); +} +exports.getFileSize = getFileSize; +function rmFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + yield fs_1.promises.unlink(filePath); + }); +} +exports.rmFile = rmFile; +function getProperRetention(retentionInput, retentionSetting) { + if (retentionInput < 0) { + throw new Error('Invalid retention, minimum value is 1.'); + } + let retention = retentionInput; + if (retentionSetting) { + const maxRetention = parseInt(retentionSetting); + if (!isNaN(maxRetention) && maxRetention < retention) { + core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`); + retention = maxRetention; + } + } + return retention; +} +exports.getProperRetention = getProperRetention; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); + }); +} +exports.sleep = sleep; +function digestForStream(stream) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + const crc64 = new crc64_1.default(); + const md5 = crypto_1.default.createHash('md5'); + stream + .on('data', data => { + crc64.update(data); + md5.update(data); + }) + .on('end', () => resolve({ + crc64: crc64.digest('base64'), + md5: md5.digest('base64') + })) + .on('error', reject); + }); + }); +} +exports.digestForStream = digestForStream; +//# sourceMappingURL=utils.js.map -function GlobSync (pattern, options) { - if (!pattern) - throw new Error('must provide pattern') - - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options) +/***/ }), - setopts(this, pattern, options) +/***/ 139: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - if (this.noprocess) - return this +"use strict"; - var n = this.minimatch.set.length - this.matches = new Array(n) - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false) - } - this._finish() -} - -GlobSync.prototype._finish = function () { - assert(this instanceof GlobSync) - if 
(this.realpath) { - var self = this - this.matches.forEach(function (matchset, index) { - var set = self.matches[index] = Object.create(null) - for (var p in matchset) { - try { - p = self._makeAbs(p) - var real = rp.realpathSync(p, self.realpathCache) - set[real] = true - } catch (er) { - if (er.syscall === 'stat') - set[self._makeAbs(p)] = true - else - throw er +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0; +const fs = __importStar(__webpack_require__(747)); +const zlib = __importStar(__webpack_require__(761)); +const util_1 = __webpack_require__(669); +const stat = util_1.promisify(fs.stat); +/** + * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip + * files then will just waste a lot of time before ultimately being discarded (especially for very large files). 
+ * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is + */ +const gzipExemptFileExtensions = [ + '.gzip', + '.zip', + '.tar.lz', + '.tar.gz', + '.tar.bz2', + '.7z' +]; +/** + * Creates a Gzip compressed file of an original file at the provided temporary filepath location + * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified + * @param {string} tempFilePath the location of where the Gzip file will be created + * @returns the size of gzip file that gets created + */ +function createGZipFileOnDisk(originalFilePath, tempFilePath) { + return __awaiter(this, void 0, void 0, function* () { + for (const gzipExemptExtension of gzipExemptFileExtensions) { + if (originalFilePath.endsWith(gzipExemptExtension)) { + // return a really large number so that the original file gets uploaded + return Number.MAX_SAFE_INTEGER; + } } - } - }) - } - common.finish(this) + return new Promise((resolve, reject) => { + const inputStream = fs.createReadStream(originalFilePath); + const gzip = zlib.createGzip(); + const outputStream = fs.createWriteStream(tempFilePath); + inputStream.pipe(gzip).pipe(outputStream); + outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () { + // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload + const size = (yield stat(tempFilePath)).size; + resolve(size); + })); + outputStream.on('error', error => { + // eslint-disable-next-line no-console + console.log(error); + reject; + }); + }); + }); +} +exports.createGZipFileOnDisk = createGZipFileOnDisk; +/** + * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O + * @param originalFilePath the path to the original file that is being GZipped + * @returns a buffer with the GZip file + */ +function createGZipFileInBuffer(originalFilePath) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + var e_1, _a; + const inputStream = fs.createReadStream(originalFilePath); + const gzip = zlib.createGzip(); + inputStream.pipe(gzip); + // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 + const chunks = []; + try { + for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { + const chunk = gzip_1_1.value; + chunks.push(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); + } + finally { if (e_1) throw e_1.error; } + } + resolve(Buffer.concat(chunks)); + })); + }); } +exports.createGZipFileInBuffer = createGZipFileInBuffer; +//# sourceMappingURL=upload-gzip.js.map +/***/ }), -GlobSync.prototype._process = function (pattern, index, inGlobStar) { - assert(this instanceof GlobSync) +/***/ 166: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. 
+"use strict"; - // See if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index) - return +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const internal_globber_1 = __webpack_require__(941); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break +/***/ }), - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } +/***/ 171: +/***/ (function(module, __unusedexports, __webpack_require__) { - var remain = pattern.slice(n) +var concatMap = __webpack_require__(857); +var balanced = __webpack_require__(507); - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix +module.exports = expandTop; - var abs = this._makeAbs(read) +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; - //if ignored, skip processing - if (childrenIgnored(this, read)) - return +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); } +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} -GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar) - // if the abs isn't a dir, then nothing can match! 
- if (!entries) - return +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' + var parts = []; + var m = balanced('{', '}', str); - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } + if (!m) + return str.split(','); - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) + parts.push.apply(parts, p); - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix.slice(-1) !== '/') - e = prefix + '/' + e - else - e = prefix + e - } + return parts; +} - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return - } +function expandTop(str) { + if (!str) + return []; - // now test all matched entries as stand-ins for that part - // of the pattern. - remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) - newPattern = [prefix, e] - else - newPattern = [e] - this._process(newPattern.concat(remain), index, inGlobStar) + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. 
+ // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); } + + return expand(escapeBraces(str), true).map(unescapeBraces); } +function identity(e) { + return e; +} -GlobSync.prototype._emitMatch = function (index, e) { - if (isIgnored(this, e)) - return +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} - var abs = this._makeAbs(e) +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} - if (this.mark) - e = this._mark(e) +function expand(str, isTop) { + var expansions = []; - if (this.absolute) { - e = abs - } + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; - if (this.matches[index][e]) - return + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } } - this.matches[index][e] = true + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. - if (this.stat) - this._stat(e) -} + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + var N; -GlobSync.prototype._readdirInGlobStar = function (abs) { - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false) + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); - var entries - var lstat - var stat - try { - lstat = fs.lstatSync(abs) - } catch (er) { - if (er.code === 'ENOENT') { - // lstat failed, doesn't exist - return null + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); } - var isSym = lstat && lstat.isSymbolicLink() - this.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. 
- if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = 'FILE' - else - entries = this._readdir(abs, false) + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } - return entries + return expansions; } -GlobSync.prototype._readdir = function (abs, inGlobStar) { - var entries - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs) - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return null +/***/ }), - if (Array.isArray(c)) - return c - } +/***/ 175: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - try { - return this._readdirEntries(abs, fs.readdirSync(abs)) - } catch (er) { - this._readdirError(abs, er) - return null - } -} +"use strict"; -GlobSync.prototype._readdirEntries = function (abs, entries) { - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true +Object.defineProperty(exports, "__esModule", { value: true }); +const assert = __webpack_require__(357); +const path = __webpack_require__(622); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. + * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. \\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; } - } - - this.cache[abs] = entries - - // mark and cache dir-ness - return entries + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); + } + return result; } - -GlobSync.prototype._readdirError = function (f, er) { - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - throw error - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) - throw er - if (!this.silent) - console.error('glob error', er) - break - } +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. 
+ */ +function ensureAbsoluteRoot(root, itemPath) { + assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; + } + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; + } + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + } + } + // Different drive + else { + return `${itemPath[0]}:\\${itemPath.substr(2)}`; + } + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; + } + return root + itemPath; } +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +function hasAbsoluteRoot(itemPath) { + assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? 
'\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map -GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { - - var entries = this._readdir(abs, inGlobStar) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return - - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false) +/***/ }), - var len = entries.length - var isSym = this.symlinks[abs] +/***/ 211: +/***/ (function(module) { - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return +module.exports = require("https"); - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue +/***/ }), - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true) +/***/ 230: +/***/ (function(__unusedmodule, exports) { - var below = gspref.concat(entries[i], remain) - this._process(below, index, true) - } -} +"use strict"; -GlobSync.prototype._processSimple = function (prefix, index) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? 
- var exists = this._stat(prefix) +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +//# sourceMappingURL=proxy.js.map - if (!this.matches[index]) - this.matches[index] = Object.create(null) +/***/ }), - // If it doesn't exist, then just mark the lack of results - if (!exists) - return +/***/ 242: +/***/ (function(__unusedmodule, exports) { - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; } - } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') +/***/ }), - // Mark this as a match - this._emitMatch(index, prefix) -} +/***/ 254: +/***/ (function(module, __unusedexports, __webpack_require__) { -// Returns either 'DIR', 'FILE', or false -GlobSync.prototype._stat = function (f) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' +const assert = __webpack_require__(357) +const path = __webpack_require__(622) +const fs = __webpack_require__(747) +let glob = undefined +try { + glob = __webpack_require__(93) +} catch (_err) { + // treat glob as optional. 
+} - if (f.length > this.maxLength) - return false +const defaultGlobOpts = { + nosort: true, + silent: true +} - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] +// for EMFILE handling +let timeout = 0 - if (Array.isArray(c)) - c = 'DIR' +const isWindows = (process.platform === "win32") - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return c +const defaults = options => { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) - if (needDir && c === 'FILE') - return false + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. +const rimraf = (p, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} } - var exists - var stat = this.statCache[abs] - if (!stat) { - var lstat - try { - lstat = fs.lstatSync(abs) - } catch (er) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return false - } - } + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') - if (lstat && lstat.isSymbolicLink()) { - try { - stat = fs.statSync(abs) - } catch (er) { - stat = lstat - } - } else { - stat = lstat - } - } + defaults(options) - this.statCache[abs] = stat + let busyTries = 0 + let errState = null + let n = 0 - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' + const next = (er) => { + errState = errState || er + if (--n === 0) + cb(errState) + } - this.cache[abs] = this.cache[abs] || c + const afterGlob = (er, results) => { + if (er) + return cb(er) - if (needDir && c === 'FILE') - return false + n = results.length + if (n === 0) + return cb() - return c -} + results.forEach(p => { + const CB = (er) => { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) + } -GlobSync.prototype._mark = function (p) { - return common.mark(this, p) -} + // this one won't happen if graceful-fs is used. 
+ if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(() => rimraf_(p, options, CB), timeout ++) + } -GlobSync.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} + // already gone + if (er.code === "ENOENT") er = null + } + timeout = 0 + next(er) + } + rimraf_(p, options, CB) + }) + } -/***/ }), + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) -/***/ 281: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + options.lstat(p, (er, stat) => { + if (!er) + return afterGlob(null, [p]) -"use strict"; + glob(p, options.glob, afterGlob) + }) -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const internal_globber_1 = __webpack_require__(297); -/** - * Constructs a globber - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); } -exports.create = create; -//# sourceMappingURL=glob.js.map - -/***/ }), -/***/ 297: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +const rimraf_ = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') -"use strict"; + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === "ENOENT") + return cb(null) -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } -var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __webpack_require__(470); -const fs = __webpack_require__(747); -const globOptionsHelper = __webpack_require__(601); -const path = __webpack_require__(622); -const patternHelper = __webpack_require__(597); -const internal_match_kind_1 = __webpack_require__(327); -const internal_pattern_1 = __webpack_require__(923); -const internal_search_state_1 = __webpack_require__(728); -const IS_WINDOWS = process.platform === 'win32'; -class DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); - } - getSearchPaths() { - // Return a copy - return this.searchPaths.slice(); - } - glob() { - var e_1, _a; - return __awaiter(this, void 0, void 0, function* () { - const result = []; - try { - for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { - const itemPath = _c.value; - result.push(itemPath); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); - } - finally { if (e_1) throw e_1.error; } - } - return result; - }); - } - globGenerator() { - return __asyncGenerator(this, arguments, function* globGenerator_1() { - // Fill in defaults options - const options = globOptionsHelper.getOptions(this.options); - // Implicit descendants? 
- const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && - (pattern.trailingSeparator || - pattern.segments[pattern.segments.length - 1] !== '**')) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**'))); - } - } - // Push the search paths - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core.debug(`Search path '${searchPath}'`); - // Exists? - try { - // Intentionally using lstat. Detection for broken symlink - // will be performed later (if following symlinks). - yield __await(fs.promises.lstat(searchPath)); - } - catch (err) { - if (err.code === 'ENOENT') { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); - } - // Search - const traversalChain = []; // used to detect cycles - while (stack.length) { - // Pop - const item = stack.pop(); - // Match? - const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - // Stat - const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - // Broken symlink, or symlink cycle detected, or no longer exists - if (!stats) { - continue; - } - // Directory - if (stats.isDirectory()) { - // Matched - if (match & internal_match_kind_1.MatchKind.Directory) { - yield yield __await(item.path); - } - // Descend? - else if (!partialMatch) { - continue; - } - // Push the child items in reverse - const childLevel = item.level + 1; - const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); - } - // File - else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await(item.path); - } - } - }); - } - /** - * Constructs a DefaultGlobber - */ - static create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - const result = new DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, '\n'); - patterns = patterns.replace(/\r/g, '\n'); - } - const lines = patterns.split('\n').map(x => x.trim()); - for (const line of lines) { - // Empty or comment - if (!line || line.startsWith('#')) { - continue; - } - // Pattern - else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; - }); - } - static stat(item, options, traversalChain) { - return __awaiter(this, void 0, void 0, function* () { - // Note: - // `stat` returns info about the target of a symlink (or symlink chain) - // `lstat` returns info about a symlink itself - let stats; - if (options.followSymbolicLinks) { - try { - // Use `stat` (following symlinks) - stats = yield fs.promises.stat(item.path); - } - catch (err) { - if (err.code === 'ENOENT') { - if (options.omitBrokenSymbolicLinks) { - core.debug(`Broken symlink '${item.path}'`); - return undefined; - } - throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); - } - throw err; - } - } - else { - // Use `lstat` (not following symlinks) - stats = yield fs.promises.lstat(item.path); - } - // Note, isDirectory() returns false for the lstat of a symlink - if (stats.isDirectory() && options.followSymbolicLinks) { - // Get the realpath - const realPath = yield fs.promises.realpath(item.path); - // Fixup the traversal chain to match the item level - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - // Test for a cycle - if (traversalChain.some((x) => x === realPath)) { - core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return undefined; - } - // Update the traversal chain - traversalChain.push(realPath); - } - return stats; - }); - } -} -exports.DefaultGlobber = DefaultGlobber; -//# sourceMappingURL=internal-globber.js.map + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) -/***/ }), + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) -/***/ 302: -/***/ (function(module, __unusedexports, __webpack_require__) { + options.unlink(p, er => { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} -module.exports = realpath -realpath.realpath = realpath -realpath.sync = realpathSync -realpath.realpathSync = realpathSync -realpath.monkeypatch = monkeypatch -realpath.unmonkeypatch = unmonkeypatch - -var fs = __webpack_require__(747) -var origRealpath = fs.realpath -var origRealpathSync = fs.realpathSync - -var version = process.version -var ok = /^v[0-5]\./.test(version) -var old = __webpack_require__(117) - -function newError (er) { - return er && er.syscall === 'realpath' && ( - er.code === 'ELOOP' || - er.code === 'ENOMEM' || - er.code === 'ENAMETOOLONG' - ) -} - -function realpath (p, cache, cb) { - if (ok) { - return origRealpath(p, cache, cb) - } +const fixWinEPERM = (p, options, er, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') - if (typeof cache === 'function') { - cb = cache - cache = null - } - origRealpath(p, cache, function (er, result) { - if (newError(er)) { - old.realpath(p, cache, cb) - } else { - cb(er, result) - } + options.chmod(p, 0o666, er2 => { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, (er3, stats) => { + if (er3) + cb(er3.code === "ENOENT" ? 
null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) }) } -function realpathSync (p, cache) { - if (ok) { - return origRealpathSync(p, cache) - } +const fixWinEPERMSync = (p, options, er) => { + assert(p) + assert(options) try { - return origRealpathSync(p, cache) - } catch (er) { - if (newError(er)) { - return old.realpathSync(p, cache) - } else { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else throw er - } } -} -function monkeypatch () { - fs.realpath = realpath - fs.realpathSync = realpathSync -} + let stats + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } -function unmonkeypatch () { - fs.realpath = origRealpath - fs.realpathSync = origRealpathSync + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) } +const rmdir = (p, options, originalEr, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') -/***/ }), - -/***/ 306: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var concatMap = __webpack_require__(896); -var balanced = __webpack_require__(621); - -module.exports = expandTop; - -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; - -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, er => { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) } -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} +const rmkids = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); + options.readdir(p, (er, files) => { + if (er) + return cb(er) + let n = files.length + if (n === 0) + return options.rmdir(p, cb) + let errState + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) } +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. 
+const rimrafSync = (p, options) => { + options = options || {} + defaults(options) -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') - var parts = []; - var m = balanced('{', '}', str); + let results - if (!m) - return str.split(','); + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); + if (!results.length) + return - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } + for (let i = 0; i < results.length; i++) { + const p = results[i] - parts.push.apply(parts, p); + let st + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return - return parts; -} + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } -function expandTop(str) { - if (!str) - return []; + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. - // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); + rmdirSync(p, options, er) + } } - - return expand(escapeBraces(str), true).map(unescapeBraces); } -function identity(e) { - return e; +const rmdirSync = (p, options, originalEr) => { + assert(p) + assert(options) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } } -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} +const rmkidsSync = (p, options) => { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. 
+ const retries = isWindows ? 100 : 1 + let i = 0 + do { + let threw = true + try { + const ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) } -function expand(str, isTop) { - var expansions = []; +module.exports = rimraf +rimraf.sync = rimrafSync - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } +/***/ }), - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } +/***/ 257: +/***/ (function(module, __unusedexports, __webpack_require__) { - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. +module.exports = __webpack_require__(913); - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; - var N; +/***/ }), - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); +/***/ 313: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - N = []; +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
- for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); +var pathModule = __webpack_require__(622); +var isWindows = process.platform === 'win32'; +var fs = __webpack_require__(747); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. + var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); } - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } } } +} - return expansions; +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); } +var normalize = pathModule.normalize; +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} -/***/ }), +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. 
+if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} -/***/ 315: -/***/ (function(module) { +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) - } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; } -} + var original = p, + seenLinks = {}, + knownHard = {}; -/***/ }), + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; -/***/ 327: -/***/ (function(__unusedmodule, exports) { + start(); -"use strict"; + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; -Object.defineProperty(exports, "__esModule", { value: true }); -/** - * Indicates whether a pattern matches a path - */ -var MatchKind; -(function (MatchKind) { - /** Not matched */ - MatchKind[MatchKind["None"] = 0] = "None"; - /** Matched if the path is a directory */ - MatchKind[MatchKind["Directory"] = 1] = "Directory"; - /** Matched if the path is a regular file */ - MatchKind[MatchKind["File"] = 2] = "File"; - /** Matched */ - MatchKind[MatchKind["All"] = 3] = "All"; -})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); -//# sourceMappingURL=internal-match-kind.js.map + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } -/***/ }), + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; -/***/ 357: -/***/ (function(module) { + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } -module.exports = require("assert"); + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } -/***/ }), + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. 
+ var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. + if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } -/***/ 359: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } -"use strict"; + if (cache) cache[original] = p; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + return p; }; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DefaultArtifactClient = void 0; -const core = __importStar(__webpack_require__(470)); -const upload_specification_1 = __webpack_require__(590); -const upload_http_client_1 = __webpack_require__(608); -const utils_1 = __webpack_require__(870); -const path_and_artifact_name_validation_1 = __webpack_require__(553); -const download_http_client_1 = __webpack_require__(855); -const download_specification_1 = __webpack_require__(532); -const config_variables_1 = __webpack_require__(401); -const path_1 = __webpack_require__(622); -class DefaultArtifactClient { - /** - * Constructs a DefaultArtifactClient - */ - static create() { - return new DefaultArtifactClient(); - } - /** - * Uploads an artifact - */ - uploadArtifact(name, files, rootDirectory, options) { - return __awaiter(this, void 0, void 0, function* () { - core.info(`Starting artifact upload -For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); - path_and_artifact_name_validation_1.checkArtifactName(name); - // Get specification for the files being uploaded - const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files); - const uploadResponse = { - artifactName: name, - artifactItems: [], - size: 0, - failedItems: [] - }; - const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); - if (uploadSpecification.length === 0) { - core.warning(`No files found that can be uploaded`); - } - else { - // Create an entry for the artifact in the file container - const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); - if (!response.fileContainerResourceUrl) { - core.debug(response.toString()); - throw new Error('No URL provided by the Artifact Service to upload an artifact to'); - } - core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); - // Upload each of the files that were found concurrently - const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - // Update the size of the artifact to indicate we are done uploading - // The uncompressed size is used for display when downloading a zip of the artifact from the UI - core.info(`File upload process has finished. Finalizing the artifact upload`); - yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); - if (uploadResult.failedItems.length > 0) { - core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); - } - else { - core.info(`Artifact has been finalized. All files have been successfully uploaded!`); - } - core.info(` -The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes -The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage -Note: The size of downloaded zips can differ significantly from the reported size. 
For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`); - uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath); - uploadResponse.size = uploadResult.uploadSize; - uploadResponse.failedItems = uploadResult.failedItems; - } - return uploadResponse; - }); - } - downloadArtifact(name, path, options) { - return __awaiter(this, void 0, void 0, function* () { - const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); - const artifacts = yield downloadHttpClient.listArtifacts(); - if (artifacts.count === 0) { - throw new Error(`Unable to find any artifacts for the associated workflow`); - } - const artifactToDownload = artifacts.value.find(artifact => { - return artifact.name === name; - }); - if (!artifactToDownload) { - throw new Error(`Unable to find an artifact with the name: ${name}`); - } - const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); - if (!path) { - path = config_variables_1.getWorkSpaceDirectory(); - } - path = path_1.normalize(path); - path = path_1.resolve(path); - // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories - const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); - if (downloadSpecification.filesToDownload.length === 0) { - core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); - } - else { - // Create all necessary directories recursively before starting any download - yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); - core.info('Directory structure has been setup for the artifact'); - yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); - yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); - } - return { - artifactName: name, - downloadPath: downloadSpecification.rootDownloadLocation - }; - }); - } - downloadAllArtifacts(path) { - return __awaiter(this, void 0, void 0, function* () { - const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); - const response = []; - const artifacts = yield downloadHttpClient.listArtifacts(); - if (artifacts.count === 0) { - core.info('Unable to find any artifacts for the associated workflow'); - return response; - } - if (!path) { - path = config_variables_1.getWorkSpaceDirectory(); - } - path = path_1.normalize(path); - path = path_1.resolve(path); - let downloadedArtifacts = 0; - while (downloadedArtifacts < artifacts.count) { - const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; - downloadedArtifacts += 1; - core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); - // Get container entries for the specific artifact - const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true); - if (downloadSpecification.filesToDownload.length === 0) { - core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); - } - else { - yield 
utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); - yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); - yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); - } - response.push({ - artifactName: currentArtifactToDownload.name, - downloadPath: downloadSpecification.rootDownloadLocation - }); - } - return response; - }); - } -} -exports.DefaultArtifactClient = DefaultArtifactClient; -//# sourceMappingURL=artifact-client.js.map -/***/ }), +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } -/***/ 383: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + // make p is absolute + p = pathModule.resolve(p); -"use strict"; + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. + return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. 
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; + + +/***/ }), + +/***/ 318: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const assert = __webpack_require__(357); const path = __webpack_require__(622); -const pathHelper = __webpack_require__(972); +const pathHelper = __webpack_require__(175); const IS_WINDOWS = process.platform === 'win32'; /** * Helper class for parsing paths into segments @@ -4400,2148 +4303,2480 @@ exports.Path = Path; /***/ }), -/***/ 385: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 335: +/***/ (function(__unusedmodule, exports) { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; +/** + * CRC64: cyclic redundancy check, 64-bits + * + * In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to + * validate that there was no corruption during transmission. 
The implementation here is based on Go's hash/crc64 pkg, + * but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go + * + * This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that + * is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27 + */ Object.defineProperty(exports, "__esModule", { value: true }); -const core = __importStar(__webpack_require__(470)); -const artifact_1 = __webpack_require__(214); -const search_1 = __webpack_require__(575); -const input_helper_1 = __webpack_require__(583); -const constants_1 = __webpack_require__(694); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - const inputs = input_helper_1.getInputs(); - const searchResult = yield search_1.findFilesToUpload(inputs.searchPath); - if (searchResult.filesToUpload.length === 0) { - // No files were found, different use cases warrant different types of behavior if nothing is found - switch (inputs.ifNoFilesFound) { - case constants_1.NoFileOptions.warn: { - core.warning(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); - break; - } - case constants_1.NoFileOptions.error: { - core.setFailed(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); - break; - } - case constants_1.NoFileOptions.ignore: { - core.info(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); - break; - } - } - } - else { - const s = searchResult.filesToUpload.length === 1 ? '' : 's'; - core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`); - core.debug(`Root artifact directory is ${searchResult.rootDirectory}`); - if (searchResult.filesToUpload.length > 10000) { - core.warning(`There are over 10,000 files in this artifact, consider creating an archive before upload to improve the upload performance.`); - } - const artifactClient = artifact_1.create(); - const options = { - continueOnError: false - }; - if (inputs.retentionDays) { - options.retentionDays = inputs.retentionDays; - } - const uploadResponse = yield artifactClient.uploadArtifact(inputs.artifactName, searchResult.filesToUpload, searchResult.rootDirectory, options); - if (uploadResponse.failedItems.length > 0) { - core.setFailed(`An error was encountered when uploading ${uploadResponse.artifactName}. 
There were ${uploadResponse.failedItems.length} items that failed to upload.`); - } - else { - core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`); - } - } +// when transpile target is >= ES2020 (after dropping node 12) these can be changed to bigint literals - ts(2737) +const PREGEN_POLY_TABLE = [ + BigInt('0x0000000000000000'), + BigInt('0x7F6EF0C830358979'), + BigInt('0xFEDDE190606B12F2'), + BigInt('0x81B31158505E9B8B'), + BigInt('0xC962E5739841B68F'), + BigInt('0xB60C15BBA8743FF6'), + BigInt('0x37BF04E3F82AA47D'), + BigInt('0x48D1F42BC81F2D04'), + BigInt('0xA61CECB46814FE75'), + BigInt('0xD9721C7C5821770C'), + BigInt('0x58C10D24087FEC87'), + BigInt('0x27AFFDEC384A65FE'), + BigInt('0x6F7E09C7F05548FA'), + BigInt('0x1010F90FC060C183'), + BigInt('0x91A3E857903E5A08'), + BigInt('0xEECD189FA00BD371'), + BigInt('0x78E0FF3B88BE6F81'), + BigInt('0x078E0FF3B88BE6F8'), + BigInt('0x863D1EABE8D57D73'), + BigInt('0xF953EE63D8E0F40A'), + BigInt('0xB1821A4810FFD90E'), + BigInt('0xCEECEA8020CA5077'), + BigInt('0x4F5FFBD87094CBFC'), + BigInt('0x30310B1040A14285'), + BigInt('0xDEFC138FE0AA91F4'), + BigInt('0xA192E347D09F188D'), + BigInt('0x2021F21F80C18306'), + BigInt('0x5F4F02D7B0F40A7F'), + BigInt('0x179EF6FC78EB277B'), + BigInt('0x68F0063448DEAE02'), + BigInt('0xE943176C18803589'), + BigInt('0x962DE7A428B5BCF0'), + BigInt('0xF1C1FE77117CDF02'), + BigInt('0x8EAF0EBF2149567B'), + BigInt('0x0F1C1FE77117CDF0'), + BigInt('0x7072EF2F41224489'), + BigInt('0x38A31B04893D698D'), + BigInt('0x47CDEBCCB908E0F4'), + BigInt('0xC67EFA94E9567B7F'), + BigInt('0xB9100A5CD963F206'), + BigInt('0x57DD12C379682177'), + BigInt('0x28B3E20B495DA80E'), + BigInt('0xA900F35319033385'), + BigInt('0xD66E039B2936BAFC'), + BigInt('0x9EBFF7B0E12997F8'), + BigInt('0xE1D10778D11C1E81'), + BigInt('0x606216208142850A'), + BigInt('0x1F0CE6E8B1770C73'), + BigInt('0x8921014C99C2B083'), + BigInt('0xF64FF184A9F739FA'), + BigInt('0x77FCE0DCF9A9A271'), + BigInt('0x08921014C99C2B08'), + BigInt('0x4043E43F0183060C'), + BigInt('0x3F2D14F731B68F75'), + BigInt('0xBE9E05AF61E814FE'), + BigInt('0xC1F0F56751DD9D87'), + BigInt('0x2F3DEDF8F1D64EF6'), + BigInt('0x50531D30C1E3C78F'), + BigInt('0xD1E00C6891BD5C04'), + BigInt('0xAE8EFCA0A188D57D'), + BigInt('0xE65F088B6997F879'), + BigInt('0x9931F84359A27100'), + BigInt('0x1882E91B09FCEA8B'), + BigInt('0x67EC19D339C963F2'), + BigInt('0xD75ADABD7A6E2D6F'), + BigInt('0xA8342A754A5BA416'), + BigInt('0x29873B2D1A053F9D'), + BigInt('0x56E9CBE52A30B6E4'), + BigInt('0x1E383FCEE22F9BE0'), + BigInt('0x6156CF06D21A1299'), + BigInt('0xE0E5DE5E82448912'), + BigInt('0x9F8B2E96B271006B'), + BigInt('0x71463609127AD31A'), + BigInt('0x0E28C6C1224F5A63'), + BigInt('0x8F9BD7997211C1E8'), + BigInt('0xF0F5275142244891'), + BigInt('0xB824D37A8A3B6595'), + BigInt('0xC74A23B2BA0EECEC'), + BigInt('0x46F932EAEA507767'), + BigInt('0x3997C222DA65FE1E'), + BigInt('0xAFBA2586F2D042EE'), + BigInt('0xD0D4D54EC2E5CB97'), + BigInt('0x5167C41692BB501C'), + BigInt('0x2E0934DEA28ED965'), + BigInt('0x66D8C0F56A91F461'), + BigInt('0x19B6303D5AA47D18'), + BigInt('0x980521650AFAE693'), + BigInt('0xE76BD1AD3ACF6FEA'), + BigInt('0x09A6C9329AC4BC9B'), + BigInt('0x76C839FAAAF135E2'), + BigInt('0xF77B28A2FAAFAE69'), + BigInt('0x8815D86ACA9A2710'), + BigInt('0xC0C42C4102850A14'), + BigInt('0xBFAADC8932B0836D'), + BigInt('0x3E19CDD162EE18E6'), + BigInt('0x41773D1952DB919F'), + BigInt('0x269B24CA6B12F26D'), + BigInt('0x59F5D4025B277B14'), + BigInt('0xD846C55A0B79E09F'), + BigInt('0xA72835923B4C69E6'), + 
BigInt('0xEFF9C1B9F35344E2'), + BigInt('0x90973171C366CD9B'), + BigInt('0x1124202993385610'), + BigInt('0x6E4AD0E1A30DDF69'), + BigInt('0x8087C87E03060C18'), + BigInt('0xFFE938B633338561'), + BigInt('0x7E5A29EE636D1EEA'), + BigInt('0x0134D92653589793'), + BigInt('0x49E52D0D9B47BA97'), + BigInt('0x368BDDC5AB7233EE'), + BigInt('0xB738CC9DFB2CA865'), + BigInt('0xC8563C55CB19211C'), + BigInt('0x5E7BDBF1E3AC9DEC'), + BigInt('0x21152B39D3991495'), + BigInt('0xA0A63A6183C78F1E'), + BigInt('0xDFC8CAA9B3F20667'), + BigInt('0x97193E827BED2B63'), + BigInt('0xE877CE4A4BD8A21A'), + BigInt('0x69C4DF121B863991'), + BigInt('0x16AA2FDA2BB3B0E8'), + BigInt('0xF86737458BB86399'), + BigInt('0x8709C78DBB8DEAE0'), + BigInt('0x06BAD6D5EBD3716B'), + BigInt('0x79D4261DDBE6F812'), + BigInt('0x3105D23613F9D516'), + BigInt('0x4E6B22FE23CC5C6F'), + BigInt('0xCFD833A67392C7E4'), + BigInt('0xB0B6C36E43A74E9D'), + BigInt('0x9A6C9329AC4BC9B5'), + BigInt('0xE50263E19C7E40CC'), + BigInt('0x64B172B9CC20DB47'), + BigInt('0x1BDF8271FC15523E'), + BigInt('0x530E765A340A7F3A'), + BigInt('0x2C608692043FF643'), + BigInt('0xADD397CA54616DC8'), + BigInt('0xD2BD67026454E4B1'), + BigInt('0x3C707F9DC45F37C0'), + BigInt('0x431E8F55F46ABEB9'), + BigInt('0xC2AD9E0DA4342532'), + BigInt('0xBDC36EC59401AC4B'), + BigInt('0xF5129AEE5C1E814F'), + BigInt('0x8A7C6A266C2B0836'), + BigInt('0x0BCF7B7E3C7593BD'), + BigInt('0x74A18BB60C401AC4'), + BigInt('0xE28C6C1224F5A634'), + BigInt('0x9DE29CDA14C02F4D'), + BigInt('0x1C518D82449EB4C6'), + BigInt('0x633F7D4A74AB3DBF'), + BigInt('0x2BEE8961BCB410BB'), + BigInt('0x548079A98C8199C2'), + BigInt('0xD53368F1DCDF0249'), + BigInt('0xAA5D9839ECEA8B30'), + BigInt('0x449080A64CE15841'), + BigInt('0x3BFE706E7CD4D138'), + BigInt('0xBA4D61362C8A4AB3'), + BigInt('0xC52391FE1CBFC3CA'), + BigInt('0x8DF265D5D4A0EECE'), + BigInt('0xF29C951DE49567B7'), + BigInt('0x732F8445B4CBFC3C'), + BigInt('0x0C41748D84FE7545'), + BigInt('0x6BAD6D5EBD3716B7'), + BigInt('0x14C39D968D029FCE'), + BigInt('0x95708CCEDD5C0445'), + BigInt('0xEA1E7C06ED698D3C'), + BigInt('0xA2CF882D2576A038'), + BigInt('0xDDA178E515432941'), + BigInt('0x5C1269BD451DB2CA'), + BigInt('0x237C997575283BB3'), + BigInt('0xCDB181EAD523E8C2'), + BigInt('0xB2DF7122E51661BB'), + BigInt('0x336C607AB548FA30'), + BigInt('0x4C0290B2857D7349'), + BigInt('0x04D364994D625E4D'), + BigInt('0x7BBD94517D57D734'), + BigInt('0xFA0E85092D094CBF'), + BigInt('0x856075C11D3CC5C6'), + BigInt('0x134D926535897936'), + BigInt('0x6C2362AD05BCF04F'), + BigInt('0xED9073F555E26BC4'), + BigInt('0x92FE833D65D7E2BD'), + BigInt('0xDA2F7716ADC8CFB9'), + BigInt('0xA54187DE9DFD46C0'), + BigInt('0x24F29686CDA3DD4B'), + BigInt('0x5B9C664EFD965432'), + BigInt('0xB5517ED15D9D8743'), + BigInt('0xCA3F8E196DA80E3A'), + BigInt('0x4B8C9F413DF695B1'), + BigInt('0x34E26F890DC31CC8'), + BigInt('0x7C339BA2C5DC31CC'), + BigInt('0x035D6B6AF5E9B8B5'), + BigInt('0x82EE7A32A5B7233E'), + BigInt('0xFD808AFA9582AA47'), + BigInt('0x4D364994D625E4DA'), + BigInt('0x3258B95CE6106DA3'), + BigInt('0xB3EBA804B64EF628'), + BigInt('0xCC8558CC867B7F51'), + BigInt('0x8454ACE74E645255'), + BigInt('0xFB3A5C2F7E51DB2C'), + BigInt('0x7A894D772E0F40A7'), + BigInt('0x05E7BDBF1E3AC9DE'), + BigInt('0xEB2AA520BE311AAF'), + BigInt('0x944455E88E0493D6'), + BigInt('0x15F744B0DE5A085D'), + BigInt('0x6A99B478EE6F8124'), + BigInt('0x224840532670AC20'), + BigInt('0x5D26B09B16452559'), + BigInt('0xDC95A1C3461BBED2'), + BigInt('0xA3FB510B762E37AB'), + BigInt('0x35D6B6AF5E9B8B5B'), + BigInt('0x4AB846676EAE0222'), + BigInt('0xCB0B573F3EF099A9'), + 
BigInt('0xB465A7F70EC510D0'), + BigInt('0xFCB453DCC6DA3DD4'), + BigInt('0x83DAA314F6EFB4AD'), + BigInt('0x0269B24CA6B12F26'), + BigInt('0x7D0742849684A65F'), + BigInt('0x93CA5A1B368F752E'), + BigInt('0xECA4AAD306BAFC57'), + BigInt('0x6D17BB8B56E467DC'), + BigInt('0x12794B4366D1EEA5'), + BigInt('0x5AA8BF68AECEC3A1'), + BigInt('0x25C64FA09EFB4AD8'), + BigInt('0xA4755EF8CEA5D153'), + BigInt('0xDB1BAE30FE90582A'), + BigInt('0xBCF7B7E3C7593BD8'), + BigInt('0xC399472BF76CB2A1'), + BigInt('0x422A5673A732292A'), + BigInt('0x3D44A6BB9707A053'), + BigInt('0x759552905F188D57'), + BigInt('0x0AFBA2586F2D042E'), + BigInt('0x8B48B3003F739FA5'), + BigInt('0xF42643C80F4616DC'), + BigInt('0x1AEB5B57AF4DC5AD'), + BigInt('0x6585AB9F9F784CD4'), + BigInt('0xE436BAC7CF26D75F'), + BigInt('0x9B584A0FFF135E26'), + BigInt('0xD389BE24370C7322'), + BigInt('0xACE74EEC0739FA5B'), + BigInt('0x2D545FB4576761D0'), + BigInt('0x523AAF7C6752E8A9'), + BigInt('0xC41748D84FE75459'), + BigInt('0xBB79B8107FD2DD20'), + BigInt('0x3ACAA9482F8C46AB'), + BigInt('0x45A459801FB9CFD2'), + BigInt('0x0D75ADABD7A6E2D6'), + BigInt('0x721B5D63E7936BAF'), + BigInt('0xF3A84C3BB7CDF024'), + BigInt('0x8CC6BCF387F8795D'), + BigInt('0x620BA46C27F3AA2C'), + BigInt('0x1D6554A417C62355'), + BigInt('0x9CD645FC4798B8DE'), + BigInt('0xE3B8B53477AD31A7'), + BigInt('0xAB69411FBFB21CA3'), + BigInt('0xD407B1D78F8795DA'), + BigInt('0x55B4A08FDFD90E51'), + BigInt('0x2ADA5047EFEC8728') +]; +class CRC64 { + constructor() { + this._crc = BigInt(0); + } + update(data) { + const buffer = typeof data === 'string' ? Buffer.from(data) : data; + let crc = CRC64.flip64Bits(this._crc); + for (const dataByte of buffer) { + const crcByte = Number(crc & BigInt(0xff)); + crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8)); } - catch (err) { - core.setFailed(err.message); + this._crc = CRC64.flip64Bits(crc); + } + digest(encoding) { + switch (encoding) { + case 'hex': + return this._crc.toString(16).toUpperCase(); + case 'base64': + return this.toBuffer().toString('base64'); + default: + return this.toBuffer(); } - }); + } + toBuffer() { + return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff)))); + } + static flip64Bits(n) { + return (BigInt(1) << BigInt(64)) - BigInt(1) - n; + } } -run(); +exports.default = CRC64; +//# sourceMappingURL=crc64.js.map + +/***/ }), + +/***/ 357: +/***/ (function(module) { +module.exports = require("assert"); /***/ }), -/***/ 401: -/***/ (function(__unusedmodule, exports) { +/***/ 365: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0; -// The number of concurrent uploads that happens at the same time -function getUploadFileConcurrency() { - return 2; -} -exports.getUploadFileConcurrency = getUploadFileConcurrency; -// When uploading large files that can't be uploaded with a single http call, this controls -// the chunk size that is used during upload -function getUploadChunkSize() { - return 8 * 1024 * 1024; // 8 MB Chunks -} -exports.getUploadChunkSize = getUploadChunkSize; -// The maximum number of retries that can be attempted before an upload or 
download fails -function getRetryLimit() { - return 5; -} -exports.getRetryLimit = getRetryLimit; -// With exponential backoff, the larger the retry count, the larger the wait time before another attempt -// The retry multiplier controls by how much the backOff time increases depending on the number of retries -function getRetryMultiplier() { - return 1.5; -} -exports.getRetryMultiplier = getRetryMultiplier; -// The initial wait time if an upload or download fails and a retry is being attempted for the first time -function getInitialRetryIntervalInMilliseconds() { - return 3000; -} -exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds; -// The number of concurrent downloads that happens at the same time -function getDownloadFileConcurrency() { - return 2; -} -exports.getDownloadFileConcurrency = getDownloadFileConcurrency; -function getRuntimeToken() { - const token = process.env['ACTIONS_RUNTIME_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); +const pathHelper = __webpack_require__(175); +const internal_match_kind_1 = __webpack_require__(432); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; } - return token; -} -exports.getRuntimeToken = getRuntimeToken; -function getRuntimeUrl() { - const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); - } - return runtimeUrl; -} -exports.getRuntimeUrl = getRuntimeUrl; -function getWorkFlowRunId() { - const workFlowRunId = process.env['GITHUB_RUN_ID']; - if (!workFlowRunId) { - throw new Error('Unable to get GITHUB_RUN_ID env variable'); + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? 
pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } } - return workFlowRunId; + return result; } -exports.getWorkFlowRunId = getWorkFlowRunId; -function getWorkSpaceDirectory() { - const workspaceDirectory = process.env['GITHUB_WORKSPACE']; - if (!workspaceDirectory) { - throw new Error('Unable to get GITHUB_WORKSPACE env variable'); +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } } - return workspaceDirectory; + return result; } -exports.getWorkSpaceDirectory = getWorkSpaceDirectory; -function getRetentionDays() { - return process.env['GITHUB_RETENTION_DAYS']; +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); } -exports.getRetentionDays = getRetentionDays; -//# sourceMappingURL=config-variables.js.map +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map /***/ }), -/***/ 402: -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Approach: -// -// 1. Get the minimatch set -// 2. For each pattern in the set, PROCESS(pattern, false) -// 3. Store matches per-set, then uniq them -// -// PROCESS(pattern, inGlobStar) -// Get the first [n] items from pattern that are all strings -// Join these together. This is PREFIX. -// If there is no more remaining, then stat(PREFIX) and -// add to matches if it succeeds. END. -// -// If inGlobStar and PREFIX is symlink and points to dir -// set ENTRIES = [] -// else readdir(PREFIX) as ENTRIES -// If fail, END -// -// with ENTRIES -// If pattern[n] is GLOBSTAR -// // handle the case where the globstar match is empty -// // by pruning it out, and testing the resulting pattern -// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) -// // handle other cases. -// for ENTRY in ENTRIES (not dotfiles) -// // attach globstar + tail onto the entry -// // Mark that this entry is a globstar match -// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) -// -// else // not globstar -// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) -// Test ENTRY against pattern[n] -// If fails, continue -// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) -// -// Caveat: -// Cache all stats and readdirs results to minimize syscall. Since all -// we ever care about is existence and directory-ness, we can just keep -// `true` for files, and [children,...] for directories, or `false` for -// things that don't exist. 
- -module.exports = glob - -var fs = __webpack_require__(747) -var rp = __webpack_require__(302) -var minimatch = __webpack_require__(93) -var Minimatch = minimatch.Minimatch -var inherits = __webpack_require__(689) -var EE = __webpack_require__(614).EventEmitter -var path = __webpack_require__(622) -var assert = __webpack_require__(357) -var isAbsolute = __webpack_require__(681) -var globSync = __webpack_require__(245) -var common = __webpack_require__(856) -var alphasort = common.alphasort -var alphasorti = common.alphasorti -var setopts = common.setopts -var ownProp = common.ownProp -var inflight = __webpack_require__(674) -var util = __webpack_require__(669) -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -var once = __webpack_require__(49) - -function glob (pattern, options, cb) { - if (typeof options === 'function') cb = options, options = {} - if (!options) options = {} - - if (options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return globSync(pattern, options) - } - - return new Glob(pattern, options, cb) -} - -glob.sync = globSync -var GlobSync = glob.GlobSync = globSync.GlobSync - -// old api surface -glob.glob = glob - -function extend (origin, add) { - if (add === null || typeof add !== 'object') { - return origin - } - - var keys = Object.keys(add) - var i = keys.length - while (i--) { - origin[keys[i]] = add[keys[i]] - } - return origin -} - -glob.hasMagic = function (pattern, options_) { - var options = extend({}, options_) - options.noprocess = true - - var g = new Glob(pattern, options) - var set = g.minimatch.set +/***/ 390: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - if (!pattern) - return false +"use strict"; - if (set.length > 1) - return true +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DefaultArtifactClient = void 0; +const core = __importStar(__webpack_require__(676)); +const upload_specification_1 = __webpack_require__(30); +const upload_http_client_1 = __webpack_require__(800); +const utils_1 = __webpack_require__(128); +const path_and_artifact_name_validation_1 = __webpack_require__(547); +const download_http_client_1 = __webpack_require__(20); +const download_specification_1 = __webpack_require__(426); +const config_variables_1 = __webpack_require__(750); +const path_1 = __webpack_require__(622); +class DefaultArtifactClient { + /** + * Constructs a DefaultArtifactClient + */ + static create() { + return new DefaultArtifactClient(); + } + /** + * Uploads an artifact + */ + uploadArtifact(name, files, rootDirectory, options) { + return __awaiter(this, void 0, void 0, function* () { + core.info(`Starting artifact upload +For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); + path_and_artifact_name_validation_1.checkArtifactName(name); + // Get specification for the files being uploaded + const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files); + const uploadResponse = { + artifactName: name, + artifactItems: [], + size: 0, + failedItems: [] + }; + const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); + if (uploadSpecification.length === 0) { + core.warning(`No files found that can be uploaded`); + } + else { + // Create an entry for the artifact in the file container + const response = yield uploadHttpClient.createArtifactInFileContainer(name, options); + if (!response.fileContainerResourceUrl) { + core.debug(response.toString()); + throw new Error('No URL provided by the Artifact Service to upload an artifact to'); + } + core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); + // Upload each of the files that were found concurrently + const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); + // Update the size of the artifact to indicate we are done uploading + // The uncompressed size is used for display when downloading a zip of the artifact from the UI + core.info(`File upload process has finished. Finalizing the artifact upload`); + yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); + if (uploadResult.failedItems.length > 0) { + core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + } + else { + core.info(`Artifact has been finalized. All files have been successfully uploaded!`); + } + core.info(` +The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes +The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. 
This takes into account any gzip compression used to reduce the upload size, time and storage - for (var j = 0; j < set[0].length; j++) { - if (typeof set[0][j] !== 'string') - return true - } - - return false +Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`); + uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath); + uploadResponse.size = uploadResult.uploadSize; + uploadResponse.failedItems = uploadResult.failedItems; + } + return uploadResponse; + }); + } + downloadArtifact(name, path, options) { + return __awaiter(this, void 0, void 0, function* () { + const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); + const artifacts = yield downloadHttpClient.listArtifacts(); + if (artifacts.count === 0) { + throw new Error(`Unable to find any artifacts for the associated workflow`); + } + const artifactToDownload = artifacts.value.find(artifact => { + return artifact.name === name; + }); + if (!artifactToDownload) { + throw new Error(`Unable to find an artifact with the name: ${name}`); + } + const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); + if (!path) { + path = config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories + const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + } + else { + // Create all necessary directories recursively before starting any download + yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + core.info('Directory structure has been setup for the artifact'); + yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + return { + artifactName: name, + downloadPath: downloadSpecification.rootDownloadLocation + }; + }); + } + downloadAllArtifacts(path) { + return __awaiter(this, void 0, void 0, function* () { + const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); + const response = []; + const artifacts = yield downloadHttpClient.listArtifacts(); + if (artifacts.count === 0) { + core.info('Unable to find any artifacts for the associated workflow'); + return response; + } + if (!path) { + path = config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + let downloadedArtifacts = 0; + while (downloadedArtifacts < artifacts.count) { + const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; + downloadedArtifacts += 1; + core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); + // Get container entries for the specific artifact + const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); + const downloadSpecification = 
download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + } + else { + yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + response.push({ + artifactName: currentArtifactToDownload.name, + downloadPath: downloadSpecification.rootDownloadLocation + }); + } + return response; + }); + } } +exports.DefaultArtifactClient = DefaultArtifactClient; +//# sourceMappingURL=artifact-client.js.map -glob.Glob = Glob -inherits(Glob, EE) -function Glob (pattern, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - - if (options && options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return new GlobSync(pattern, options) - } - - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb) +/***/ }), - setopts(this, pattern, options) - this._didRealPath = false +/***/ 409: +/***/ (function(module) { - // process each pattern in the minimatch set - var n = this.minimatch.set.length +"use strict"; - // The matches are stored as {: true,...} so that - // duplicates are automagically pruned. - // Later, we do an Object.keys() on these. - // Keep them as a list so we can fill in when nonull is set. - this.matches = new Array(n) - if (typeof cb === 'function') { - cb = once(cb) - this.on('error', cb) - this.on('end', function (matches) { - cb(null, matches) - }) - } +function posix(path) { + return path.charAt(0) === '/'; +} - var self = this - this._processing = 0 +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); - this._emitQueue = [] - this._processQueue = [] - this.paused = false + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} - if (this.noprocess) - return this +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; - if (n === 0) - return done() - var sync = true - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false, done) - } - sync = false +/***/ }), - function done () { - --self._processing - if (self._processing <= 0) { - if (sync) { - process.nextTick(function () { - self._finish() - }) - } else { - self._finish() - } - } - } -} +/***/ 413: +/***/ (function(module) { -Glob.prototype._finish = function () { - assert(this instanceof Glob) - if (this.aborted) - return +module.exports = require("stream"); - if (this.realpath && !this._didRealpath) - return this._realpath() +/***/ }), - common.finish(this) - this.emit('end', this.found) -} +/***/ 417: +/***/ (function(module) { -Glob.prototype._realpath = function () { - if (this._didRealpath) - return +module.exports = require("crypto"); - this._didRealpath = true +/***/ }), - var n = this.matches.length - if (n === 0) - return this._finish() +/***/ 424: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - var self = this - for (var i = 0; i < this.matches.length; i++) - this._realpathSet(i, next) +"use strict"; - function next () { - if (--n === 0) - self._finish() - } +// For internal use, subject to change. +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__webpack_require__(747)); +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(615); +function issueCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); } +exports.issueCommand = issueCommand; +//# sourceMappingURL=file-command.js.map -Glob.prototype._realpathSet = function (index, cb) { - var matchset = this.matches[index] - if (!matchset) - return cb() - - var found = Object.keys(matchset) - var self = this - var n = found.length +/***/ }), - if (n === 0) - return cb() +/***/ 426: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - var set = this.matches[index] = Object.create(null) - found.forEach(function (p, i) { - // If there's a problem with the stat, then it means that - // one or more of the links in the realpath couldn't be - // resolved. just return the abs value in that case. - p = self._makeAbs(p) - rp.realpath(p, self.realpathCache, function (er, real) { - if (!er) - set[real] = true - else if (er.syscall === 'stat') - set[p] = true - else - self.emit('error', er) // srsly wtf right here +"use strict"; - if (--n === 0) { - self.matches[index] = set - cb() - } - }) - }) +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getDownloadSpecification = void 0; +const path = __importStar(__webpack_require__(622)); +/** + * Creates a specification for a set of files that will be downloaded + * @param artifactName the name of the artifact + * @param artifactEntries a set of container entries that describe that files that make up an artifact + * @param downloadPath the path where the artifact will be downloaded to + * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to + */ +function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { + // use a set for the directory paths so that there are no duplicates + const directories = new Set(); + const specifications = { + rootDownloadLocation: includeRootDirectory + ? path.join(downloadPath, artifactName) + : downloadPath, + directoryStructure: [], + emptyFilesToCreate: [], + filesToDownload: [] + }; + for (const entry of artifactEntries) { + // Ignore artifacts in the container that don't begin with the same name + if (entry.path.startsWith(`${artifactName}/`) || + entry.path.startsWith(`${artifactName}\\`)) { + // normalize all separators to the local OS + const normalizedPathEntry = path.normalize(entry.path); + // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path + const filePath = path.join(downloadPath, includeRootDirectory + ? normalizedPathEntry + : normalizedPathEntry.replace(artifactName, '')); + // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' + // itemType cannot be relied upon. 
The file must be used to determine the directory structure + if (entry.itemType === 'file') { + // Get the directories that we need to create from the filePath for each individual file + directories.add(path.dirname(filePath)); + if (entry.fileLength === 0) { + // An empty file was uploaded, create the empty files locally so that no extra http calls are made + specifications.emptyFilesToCreate.push(filePath); + } + else { + specifications.filesToDownload.push({ + sourceLocation: entry.contentLocation, + targetPath: filePath + }); + } + } + } + } + specifications.directoryStructure = Array.from(directories); + return specifications; } +exports.getDownloadSpecification = getDownloadSpecification; +//# sourceMappingURL=download-specification.js.map -Glob.prototype._mark = function (p) { - return common.mark(this, p) -} +/***/ }), -Glob.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} +/***/ 432: +/***/ (function(__unusedmodule, exports) { -Glob.prototype.abort = function () { - this.aborted = true - this.emit('abort') +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map + +/***/ }), + +/***/ 459: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +exports.alphasort = alphasort +exports.alphasorti = alphasorti +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) } -Glob.prototype.pause = function () { - if (!this.paused) { - this.paused = true - this.emit('pause') - } +var path = __webpack_require__(622) +var minimatch = __webpack_require__(727) +var isAbsolute = __webpack_require__(409) +var Minimatch = minimatch.Minimatch + +function alphasorti (a, b) { + return a.toLowerCase().localeCompare(b.toLowerCase()) } -Glob.prototype.resume = function () { - if (this.paused) { - this.emit('resume') - this.paused = false - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0) - this._emitQueue.length = 0 - for (var i = 0; i < eq.length; i ++) { - var e = eq[i] - this._emitMatch(e[0], e[1]) - } - } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0) - this._processQueue.length = 0 - for (var i = 0; i < pq.length; i ++) { - var p = pq[i] - this._processing-- - this._process(p[0], p[1], p[2], p[3]) - } - } - } +function alphasort (a, b) { + return a.localeCompare(b) } -Glob.prototype._process = function (pattern, index, inGlobStar, cb) { - assert(this instanceof Glob) - assert(typeof cb === 'function') +function setupIgnores (self, options) { + self.ignore = options.ignore || [] - if (this.aborted) - return + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] - this._processing++ - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]) - return + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) 
} +} - //console.error('PROCESS %d', this._processing, pattern) - - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) } - // now n is the index of the first one that is *not* a string. - // see if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index, cb) - return + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break +function setopts (self, pattern, options) { + if (!options) + options = {} - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break + // base-matching: just use globstar for that. + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern } - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute - var abs = this._makeAbs(read) + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) - //if ignored, skip _processing - if (childrenIgnored(this, read)) - return cb() + setupIgnores(self, options) - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) -} - -Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - -Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return cb() - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. 
- var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd } - //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return cb() + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return cb() - } - - // now test all matched entries as stand-ins for that part - // of the pattern. - remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) else - e = prefix + e + m.forEach(function (m) { + all[m] = true + }) } - this._process([e].concat(remain), index, inGlobStar, cb) } - cb() -} -Glob.prototype._emitMatch = function (index, e) { - if (this.aborted) - return + if (!nou) + all = Object.keys(all) - if (isIgnored(this, e)) - return + if (!self.nosort) + all = all.sort(self.nocase ? 
alphasorti : alphasort) - if (this.paused) { - this._emitQueue.push([index, e]) - return + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } } - var abs = isAbsolute(e) ? e : this._makeAbs(e) + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) - if (this.mark) - e = this._mark(e) + self.found = all +} - if (this.absolute) - e = abs +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' - if (this.matches[index][e]) - return + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } } - this.matches[index][e] = true + return m +} - var st = this.statCache[abs] - if (st) - this.emit('stat', e, st) +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } - this.emit('match', e) + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs } -Glob.prototype._readdirInGlobStar = function (abs, cb) { - if (this.aborted) - return - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false, cb) +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false - var lstatkey = 'lstat\0' + abs - var self = this - var lstatcb = inflight(lstatkey, lstatcb_) + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} - if (lstatcb) - fs.lstat(abs, lstatcb) +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false - function lstatcb_ (er, lstat) { - if (er && er.code === 'ENOENT') - return cb() + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} - var isSym = lstat && lstat.isSymbolicLink() - self.symlinks[abs] = isSym - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. 
- if (!isSym && lstat && !lstat.isDirectory()) { - self.cache[abs] = 'FILE' - cb() - } else - self._readdir(abs, false, cb) - } -} +/***/ }), -Glob.prototype._readdir = function (abs, inGlobStar, cb) { - if (this.aborted) - return +/***/ 479: +/***/ (function(__unusedmodule, exports) { - cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) - if (!cb) - return - - //console.error('RD %j %j', +inGlobStar, abs) - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return cb() - - if (Array.isArray(c)) - return cb(null, c) - } +"use strict"; - var self = this - fs.readdir(abs, readdirCb(this, abs, cb)) +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } } - -function readdirCb (self, abs, cb) { - return function (er, entries) { - if (er) - self._readdirError(abs, er, cb) - else - self._readdirEntries(abs, entries, cb) - } +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } } - -Glob.prototype._readdirEntries = function (abs, entries, cb) { - if (this.aborted) - return - - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. 
- if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); } - } - - this.cache[abs] = entries - return cb(null, entries) } +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map -Glob.prototype._readdirError = function (f, er, cb) { - if (this.aborted) - return +/***/ }), - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - this.emit('error', error) - this.abort() - } - break +/***/ 507: +/***/ (function(module) { - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break +"use strict"; - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) { - this.emit('error', er) - // If the error is handled, then we abort - // if not, we threw out of here - this.abort() - } - if (!this.silent) - console.error('glob error', er) - break - } +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); - return cb() -} + var r = range(a, b, str); -Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; } +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} -Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - //console.error('pgs2', prefix, remain[0], entries) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return cb() - - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? 
[ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false, cb) +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; - var isSym = this.symlinks[abs] - var len = entries.length + if (ai >= 0 && bi > 0) { + begs = []; + left = str.length; - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return cb() + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue + bi = str.indexOf(b, i + 1); + } - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true, cb) + i = ai < bi && ai >= 0 ? ai : bi; + } - var below = gspref.concat(entries[i], remain) - this._process(below, index, true, cb) + if (begs.length) { + result = [ left, right ]; + } } - cb() -} - -Glob.prototype._processSimple = function (prefix, index, cb) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var self = this - this._stat(prefix, function (er, exists) { - self._processSimple2(prefix, index, er, exists, cb) - }) + return result; } -Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { - - //console.error('ps2', prefix, exists) - if (!this.matches[index]) - this.matches[index] = Object.create(null) - // If it doesn't exist, then just mark the lack of results - if (!exists) - return cb() +/***/ }), - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } +/***/ 547: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') +"use strict"; - // Mark this as a match - this._emitMatch(index, prefix) - cb() -} - -// Returns either 'DIR', 'FILE', or false -Glob.prototype._stat = function (f, cb) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return cb() - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return cb(null, c) - - if (needDir && c === 'FILE') - return cb() - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (stat !== undefined) { - if (stat === false) - return cb(null, stat) - else { - var type = stat.isDirectory() ? 'DIR' : 'FILE' - if (needDir && type === 'FILE') - return cb() - else - return cb(null, type, stat) +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkArtifactFilePath = exports.checkArtifactName = void 0; +const core_1 = __webpack_require__(676); +/** + * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected + * from the server if attempted to be sent over. 
These characters are not allowed due to limitations with certain + * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an + * individual filesystem/platform will not be supported on all fileSystems/platforms + * + * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone + */ +const invalidArtifactFilePathCharacters = new Map([ + ['"', ' Double quote "'], + [':', ' Colon :'], + ['<', ' Less than <'], + ['>', ' Greater than >'], + ['|', ' Vertical bar |'], + ['*', ' Asterisk *'], + ['?', ' Question mark ?'], + ['\r', ' Carriage return \\r'], + ['\n', ' Line feed \\n'] +]); +const invalidArtifactNameCharacters = new Map([ + ...invalidArtifactFilePathCharacters, + ['\\', ' Backslash \\'], + ['/', ' Forward slash /'] +]); +/** + * Scans the name of the artifact to make sure there are no illegal characters + */ +function checkArtifactName(name) { + if (!name) { + throw new Error(`Artifact name: ${name}, is incorrectly provided`); } - } - - var self = this - var statcb = inflight('stat\0' + abs, lstatcb_) - if (statcb) - fs.lstat(abs, statcb) - - function lstatcb_ (er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - // If it's a symlink, then treat it as the target, unless - // the target does not exist, then treat it as a file. - return fs.stat(abs, function (er, stat) { - if (er) - self._stat2(f, abs, null, lstat, cb) - else - self._stat2(f, abs, er, stat, cb) - }) - } else { - self._stat2(f, abs, er, lstat, cb) + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) { + if (name.includes(invalidCharacterKey)) { + throw new Error(`Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()} + +These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`); + } } - } + core_1.info(`Artifact name is valid!`); } - -Glob.prototype._stat2 = function (f, abs, er, stat, cb) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return cb() - } - - var needDir = f.slice(-1) === '/' - this.statCache[abs] = stat - - if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) - return cb(null, false, stat) - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return cb() - - return cb(null, c, stat) +exports.checkArtifactName = checkArtifactName; +/** + * Scans the name of the filePath used to make sure there are no illegal characters + */ +function checkArtifactFilePath(path) { + if (!path) { + throw new Error(`Artifact path: ${path}, is incorrectly provided`); + } + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { + if (path.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} + +The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. 
To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems. + `); + } + } } - - -/***/ }), - -/***/ 413: -/***/ (function(module) { - -module.exports = require("stream"); - -/***/ }), - -/***/ 417: -/***/ (function(module) { - -module.exports = require("crypto"); +exports.checkArtifactFilePath = checkArtifactFilePath; +//# sourceMappingURL=path-and-artifact-name-validation.js.map /***/ }), -/***/ 431: +/***/ 582: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +/* eslint-disable @typescript-eslint/no-explicit-any */ +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); return result; }; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", { value: true }); -const os = __importStar(__webpack_require__(87)); -const utils_1 = __webpack_require__(82); +exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; +const http = __importStar(__webpack_require__(605)); +const https = __importStar(__webpack_require__(211)); +const pm = __importStar(__webpack_require__(230)); +const tunnel = __importStar(__webpack_require__(257)); +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); /** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); +function getProxyUrl(serverUrl) { + const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; } -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); } } -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + })); + }); + } } -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + const parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; } -//# sourceMappingURL=command.js.map - -/***/ }), - -/***/ 452: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -exports.HttpManager = void 0; -const utils_1 = __webpack_require__(870); -/** - * Used for managing http clients during either upload or download - */ -class HttpManager { - constructor(clientCount, userAgent) { - if (clientCount < 1) { - throw new Error('There must be at least one client'); - } +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + 
this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; this.userAgent = userAgent; - this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } } - getClient(index) { - return this.clients[index]; + options(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + }); } - // client disposal is necessary if a keep-alive connection is used to properly close the connection - // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 - disposeAndReplaceClient(index) { - this.clients[index].dispose(); - this.clients[index] = utils_1.createHttpClient(this.userAgent); + get(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + }); } - disposeAndReplaceAllClients() { - for (const [index] of this.clients.entries()) { - this.disposeAndReplaceClient(index); - } + del(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + }); + } + post(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + }); + } + patch(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + }); + } + put(requestUrl, data, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + }); + } + head(requestUrl, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + }); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return __awaiter(this, void 0, void 0, function* () { + return this.request(verb, requestUrl, stream, additionalHeaders); + }); } -} -exports.HttpManager = HttpManager; -//# sourceMappingURL=http-manager.js.map - -/***/ }), - -/***/ 470: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const command_1 = __webpack_require__(431); -const file_command_1 = __webpack_require__(102); -const utils_1 = __webpack_require__(82); -const os = __importStar(__webpack_require__(87)); -const path = __importStar(__webpack_require__(622)); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; /** - * A code indicating that the action was a failure + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env['GITHUB_ENV'] || ''; - if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); + getJson(requestUrl, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + const res = yield this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - else { - command_1.issueCommand('set-env', { name }, convertedVal); + postJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - const filePath = process.env['GITHUB_PATH'] || ''; - if (filePath) { - file_command_1.issueCommand('PATH', inputPath); + putJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - else { - command_1.issueCommand('add-path', {}, inputPath); + patchJson(requestUrl, obj, additionalHeaders = {}) { + return __awaiter(this, void 0, void 0, function* () { + const data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + const res = yield this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + }); } - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. The value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. 
- * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. Errors will be converted to string via toString() - */ -function error(message) { - command_1.issue('error', message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds an warning issue - * @param message warning issue message. Errors will be converted to string via toString() - */ -function warning(message) { - command_1.issue('warning', message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. 
- * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. - * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -//# sourceMappingURL=core.js.map - -/***/ }), - -/***/ 489: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.retryHttpClientRequest = exports.retry = void 0; -const utils_1 = __webpack_require__(870); -const core = __importStar(__webpack_require__(470)); -const config_variables_1 = __webpack_require__(401); -function retry(name, operation, customErrorMessages, maxAttempts) { - return __awaiter(this, void 0, void 0, function* () { - let response = undefined; - let statusCode = undefined; - let isRetryable = false; - let errorMessage = ''; - let customErrorInformation = undefined; - let attempt = 1; - while (attempt <= maxAttempts) { - try { - response = yield operation(); - statusCode = response.message.statusCode; - if (utils_1.isSuccessStatusCode(statusCode)) { - return response; + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + const parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + do { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (const handler of this.handlers) { + if (handler.canHandleAuthentication(response)) { + authenticationHandler = handler; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } } - // Extra error information that we want to display if a particular response code is hit - if (statusCode) { - customErrorInformation = customErrorMessages.get(statusCode); + let redirectsRemaining = this._maxRedirects; + while (response.message.statusCode && + HttpRedirectCodes.includes(response.message.statusCode) && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + const parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol === 'https:' && + parsedUrl.protocol !== parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. 
+ yield response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (const header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; } - isRetryable = utils_1.isRetryableStatusCode(statusCode); - errorMessage = `Artifact service responded with ${statusCode}`; - } - catch (error) { - isRetryable = true; - errorMessage = error.message; - } - if (!isRetryable) { - core.info(`${name} - Error is not retryable`); - if (response) { - utils_1.displayHttpDiagnostics(response); + if (!response.message.statusCode || + !HttpResponseRetryCodes.includes(response.message.statusCode)) { + // If not a retry code, return immediately instead of retrying + return response; } - break; - } - core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt)); - attempt++; + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } while (numTries < maxTries); + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); } - if (response) { - utils_1.displayHttpDiagnostics(response); + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + function callbackForResult(err, res) { + if (err) { + reject(err); + } + else if (!res) { + // If `err` is not passed, then `res` must be passed. + reject(new Error('Unknown error')); + } + else { + resolve(res); + } + } + this.requestRawWithCallback(info, data, callbackForResult); + }); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; + } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } - if (customErrorInformation) { - throw Error(`${name} failed: ${customErrorInformation}`); + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } } - throw Error(`${name} failed: ${errorMessage}`); - }); -} -exports.retry = retry; -function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) { - return __awaiter(this, void 0, void 0, function* () { - return yield retry(name, method, customErrorMessages, maxAttempts); - }); + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; + } + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + })); + }); + } } -exports.retryHttpClientRequest = retryHttpClientRequest; -//# sourceMappingURL=requestUtils.js.map +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map + +/***/ }), + +/***/ 605: +/***/ (function(module) { + +module.exports = require("http"); /***/ }), -/***/ 532: +/***/ 609: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); -exports.getDownloadSpecification = void 0; -const path = __importStar(__webpack_require__(622)); -/** - * Creates a specification for a set of files that will be downloaded - * @param artifactName the name of the artifact - * @param artifactEntries a set of container entries that describe that files that make up an artifact - * @param downloadPath the path where the artifact will be downloaded to - * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to - */ -function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { - // use a set for the directory paths so that there are no duplicates - const directories = new Set(); - const specifications = { - rootDownloadLocation: includeRootDirectory - ? path.join(downloadPath, artifactName) - : downloadPath, - directoryStructure: [], - emptyFilesToCreate: [], - filesToDownload: [] - }; - for (const entry of artifactEntries) { - // Ignore artifacts in the container that don't begin with the same name - if (entry.path.startsWith(`${artifactName}/`) || - entry.path.startsWith(`${artifactName}\\`)) { - // normalize all separators to the local OS - const normalizedPathEntry = path.normalize(entry.path); - // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path - const filePath = path.join(downloadPath, includeRootDirectory - ? normalizedPathEntry - : normalizedPathEntry.replace(artifactName, '')); - // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' - // itemType cannot be relied upon. 
The file must be used to determine the directory structure - if (entry.itemType === 'file') { - // Get the directories that we need to create from the filePath for each individual file - directories.add(path.dirname(filePath)); - if (entry.fileLength === 0) { - // An empty file was uploaded, create the empty files locally so that no extra http calls are made - specifications.emptyFilesToCreate.push(filePath); +const assert = __webpack_require__(357); +const os = __webpack_require__(87); +const path = __webpack_require__(622); +const pathHelper = __webpack_require__(175); +const minimatch_1 = __webpack_require__(727); +const internal_match_kind_1 = __webpack_require__(432); +const internal_path_1 = __webpack_require__(318); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, segments) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); + } + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert(segments.length, `Parameter 'segments' must not empty`); + const root = Pattern.getLiteral(segments[0]); + assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); + } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep)) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an aboslute root. 
+ itemPath = `${itemPath}${path.sep}`; + } + } + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; + } + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + } + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' + } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern) { + // Empty + assert(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + const homedir = os.homedir(); + assert(homedir, 'Unable to determine HOME directory'); + assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; + } + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); + } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. 
+ */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; + } + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; + continue; + } + // Closed + else if (c2 === ']') { + closed = i2; + break; + } + // Otherwise + else { + set += c2; + } } - else { - specifications.filesToDownload.push({ - sourceLocation: entry.contentLocation, - targetPath: filePath - }); + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } } + // Otherwise fall thru } + // Append + literal += c; } + return literal; + } + /** + * Escapes regexp special characters + * https://javascript.info/regexp-escaping + */ + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); } - specifications.directoryStructure = Array.from(directories); - return specifications; } -exports.getDownloadSpecification = getDownloadSpecification; -//# sourceMappingURL=download-specification.js.map +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map /***/ }), -/***/ 539: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 614: +/***/ (function(module) { + +module.exports = require("events"); + +/***/ }), + +/***/ 615: +/***/ (function(__unusedmodule, exports) { "use strict"; +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", { value: true }); -const http = __webpack_require__(605); -const https = __webpack_require__(211); -const pm = __webpack_require__(950); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - 
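Aside on the `Pattern.match` logic added above: the trailing-slash workaround exists because of how Minimatch treats a directory path against a trailing `**`. A minimal sketch of that quirk, assuming minimatch v3 (the version this bundle vendors) is installed:

const minimatch = require('minimatch');
const opts = { dot: true, nobrace: true, nocomment: true, noext: true, nonegate: true };

// For the pattern '/foo/**', Minimatch rejects the directory itself but
// accepts it once a trailing separator is appended, which is exactly why
// Pattern.match() above appends path.sep before matching.
console.log(minimatch('/foo', '/foo/**', opts));  // false
console.log(minimatch('/foo/', '/foo/**', opts)); // true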
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); /** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); + else if (typeof input === 'string' || input instanceof String) { + return input; } + return JSON.stringify(input); } -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if 
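The `toCommandValue` helper added above is what lets non-string values flow into workflow commands; a quick sketch of its behavior (the sample values are made up for illustration):

// Mirrors toCommandValue above: null/undefined become '', strings pass
// through unchanged, anything else is serialized with JSON.stringify.
const toCommandValue = (input) => {
  if (input === null || input === undefined) return '';
  if (typeof input === 'string' || input instanceof String) return input;
  return JSON.stringify(input);
};

console.log(toCommandValue(undefined));          // ''
console.log(toCommandValue('already-a-string')); // 'already-a-string'
console.log(toCommandValue({ retention: 90 }));  // '{"retention":90}'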
(requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - } - get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - } - del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - } - post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - } - patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - } - put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - } - head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); +exports.toCommandValue = toCommandValue; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 622: +/***/ (function(module) { + +module.exports = require("path"); + +/***/ }), + +/***/ 628: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(676)); +const constants_1 = __webpack_require__(858); +/** + * Helper to get all the inputs for the action + */ +function getInputs() { + const name = core.getInput(constants_1.Inputs.Name); + const path = core.getInput(constants_1.Inputs.Path, { required: true }); + const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound); + const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound]; + if (!noFileBehavior) { + core.setFailed(`Unrecognized ${constants_1.Inputs.IfNoFilesFound} input. Provided: ${ifNoFilesFound}. 
Available options: ${Object.keys(constants_1.NoFileOptions)}`); } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); + const inputs = { + artifactName: name, + searchPath: path, + ifNoFilesFound: noFileBehavior + }; + const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays); + if (retentionDaysStr) { + inputs.retentionDays = parseInt(retentionDaysStr); + if (isNaN(inputs.retentionDays)) { + core.setFailed('Invalid retention-days'); + } } + return inputs; +} +exports.getInputs = getInputs; + + +/***/ }), + +/***/ 630: +/***/ (function(module) { + +module.exports = require("perf_hooks"); + +/***/ }), + +/***/ 631: +/***/ (function(module) { + +module.exports = require("net"); + +/***/ }), + +/***/ 669: +/***/ (function(module) { + +module.exports = require("util"); + +/***/ }), + +/***/ 676: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const command_1 = __webpack_require__(936); +const file_command_1 = __webpack_require__(424); +const utils_1 = __webpack_require__(615); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise + * A code indicating that the action was successful */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } + ExitCode[ExitCode["Success"] = 0] = "Success"; /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch + * A code indicating that the action was a failure */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. 
- return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); - } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); - }); - } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - let handleResult = (err, res) => { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); - }); - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error('Request timeout: ' + info.options.path), null); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err, null); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. 
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - let parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - this.handlers.forEach(handler => { - handler.prepareRequest(info.options); - }); - } - return info; + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; + file_command_1.issueCommand('ENV', commandValue); } - _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); - } - return lowercaseKeys(headers || {}); + else { + command_1.issueCommand('set-env', { name }, convertedVal); } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueCommand('PATH', inputPath); } - _getAgent(parsedUrl) { - let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive 
&& useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (!!agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (!!this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __webpack_require__(988); - } - const agentOptions = { - maxSockets: maxSockets, - keepAlive: this._keepAlive, - proxy: { - ...((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - }), - host: proxyUrl.hostname, - port: proxyUrl.port - } - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; + else { + command_1.issueCommand('add-path', {}, inputPath); } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
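Returning to the `exportVariable` helper added a little earlier: when `GITHUB_ENV` is set it builds a heredoc-style block for the ENV file command instead of issuing the legacy `set-env` workflow command. A rough sketch of the block it constructs, using a hypothetical variable name and multi-line value:

const os = require('os');

// Same construction as exportVariable above; this is the text handed to the
// ENV file command (which, as an assumption here, ends up appended to the
// file that $GITHUB_ENV points at).
const delimiter = '_GitHubActionsFileCommandDelimeter_';
const name = 'MY_VAR';
const convertedVal = 'line one' + os.EOL + 'line two';
console.log(`${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`);
// MY_VAR<<_GitHubActionsFileCommandDelimeter_
// line one
// line two
// _GitHubActionsFileCommandDelimeter_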
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - }); + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. The value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); } + return val.trim(); } -exports.HttpClient = HttpClient; - - -/***/ }), - -/***/ 553: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -exports.checkArtifactFilePath = exports.checkArtifactName = void 0; -const core_1 = __webpack_require__(470); +exports.getInput = getInput; /** - * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected - * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain - * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an - * individual filesystem/platform will not be supported on all fileSystems/platforms + * Sets the value of an output. * - * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone + * @param name name of the output to set + * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ -const invalidArtifactFilePathCharacters = new Map([ - ['"', ' Double quote "'], - [':', ' Colon :'], - ['<', ' Less than <'], - ['>', ' Greater than >'], - ['|', ' Vertical bar |'], - ['*', ' Asterisk *'], - ['?', ' Question mark ?'], - ['\r', ' Carriage return \\r'], - ['\n', ' Line feed \\n'] -]); -const invalidArtifactNameCharacters = new Map([ - ...invalidArtifactFilePathCharacters, - ['\\', ' Backslash \\'], - ['/', ' Forward slash /'] -]); +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; /** - * Scans the name of the artifact to make sure there are no illegal characters + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * */ -function checkArtifactName(name) { - if (!name) { - throw new Error(`Artifact name: ${name}, is incorrectly provided`); - } - for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) { - if (name.includes(invalidCharacterKey)) { - throw new Error(`Artifact name is not valid: ${name}. 
Contains the following character: ${errorMessageForCharacter} - -Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()} - -These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`); - } - } - core_1.info(`Artifact name is valid!`); +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); } -exports.checkArtifactName = checkArtifactName; +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- /** - * Scans the name of the filePath used to make sure there are no illegal characters + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message */ -function checkArtifactFilePath(path) { - if (!path) { - throw new Error(`Artifact path: ${path}, is incorrectly provided`); - } - for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path.includes(invalidCharacterKey)) { - throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter} - -Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} - -The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems. - `); +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + */ +function error(message) { + command_1.issue('error', message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds an warning issue + * @param message warning issue message. Errors will be converted to string via toString() + */ +function warning(message) { + command_1.issue('warning', message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. 
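For context on the (pre-1.1.0) artifact name validation being removed above: the rejected character set maps directly onto what NTFS and similarly restrictive file systems disallow. A small standalone sketch of the same check, with the characters copied from the map above (the file-path variant additionally permits '\' and '/'):

// Characters rejected in artifact names by the validation above.
const invalidArtifactNameCharacters = ['"', ':', '<', '>', '|', '*', '?', '\r', '\n', '\\', '/'];

const findInvalidCharacter = (name) =>
  invalidArtifactNameCharacters.find(c => name.includes(c));

console.log(findInvalidCharacter('my-artifact')); // undefined -> name is valid
console.log(findInvalidCharacter('logs/run-1'));  // '/' -> would be rejected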
+ */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); } - } + finally { + endGroup(); + } + return result; + }); } -exports.checkArtifactFilePath = checkArtifactFilePath; -//# sourceMappingURL=path-and-artifact-name-validation.js.map +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map /***/ }), -/***/ 575: +/***/ 682: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { @@ -6551,1241 +6786,1420 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); -const glob = __importStar(__webpack_require__(281)); -const path = __importStar(__webpack_require__(622)); -const core_1 = __webpack_require__(470); -const fs_1 = __webpack_require__(747); -const path_1 = __webpack_require__(622); -const util_1 = __webpack_require__(669); -const stats = util_1.promisify(fs_1.stat); -function getDefaultGlobOptions() { - return { - followSymbolicLinks: true, - implicitDescendants: true, - omitBrokenSymbolicLinks: true - }; -} -/** - * If multiple paths are specific, the least common ancestor (LCA) of the search paths is used as - * the delimiter to control the directory structure for the artifact. This function returns the LCA - * when given an array of search paths - * - * Example 1: The patterns `/foo/` and `/bar/` returns `/` - * - * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo` - */ -function getMultiPathLCA(searchPaths) { - if (searchPaths.length < 2) { - throw new Error('At least two search paths must be provided'); - } - const commonPaths = new Array(); - const splitPaths = new Array(); - let smallestPathLength = Number.MAX_SAFE_INTEGER; - // split each of the search paths using the platform specific separator - for (const searchPath of searchPaths) { - core_1.debug(`Using search path ${searchPath}`); - const splitSearchPath = path.normalize(searchPath).split(path.sep); - // keep track of the smallest path length so that we don't accidentally later go out of bounds - smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length); - splitPaths.push(splitSearchPath); - } - // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it - if (searchPaths[0].startsWith(path.sep)) { - commonPaths.push(path.sep); - } - let splitIndex = 0; - // function to check if the paths are the same at a specific index - function isPathTheSame() { - const compare = splitPaths[0][splitIndex]; - for (let i = 1; i < splitPaths.length; i++) { - if (compare !== splitPaths[i][splitIndex]) { - // a non-common index has been reached - return false; - } - } - return true; - } - // loop over all the search paths until there is a non-common ancestor or we go out of bounds - while (splitIndex < smallestPathLength) { - if (!isPathTheSame()) { - break; - } - // if all are the same, add to the end result & increment the index - commonPaths.push(splitPaths[0][splitIndex]); - splitIndex++; - } - return path.join(...commonPaths); -} -function findFilesToUpload(searchPath, globOptions) { +exports.retryHttpClientRequest = exports.retry = void 0; +const utils_1 = __webpack_require__(128); +const core = __importStar(__webpack_require__(676)); +const config_variables_1 = __webpack_require__(750); +function retry(name, operation, customErrorMessages, maxAttempts) { return __awaiter(this, void 0, void 0, function* () { - const searchResults = []; - const 
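The least-common-ancestor computation above (part of the old bundled search code) is easiest to see with concrete inputs. A standalone sketch of the same idea, assuming POSIX-style paths:

const path = require('path');

// Split every search path on path.sep and keep the longest common prefix,
// preserving the leading separator the way getMultiPathLCA above does.
function leastCommonAncestor(searchPaths) {
  const splitPaths = searchPaths.map(p => path.normalize(p).split(path.sep));
  const smallestPathLength = Math.min(...splitPaths.map(p => p.length));
  const commonPaths = searchPaths[0].startsWith(path.sep) ? [path.sep] : [];
  for (let i = 0; i < smallestPathLength; i++) {
    const segment = splitPaths[0][i];
    if (!splitPaths.every(p => p[i] === segment)) break;
    commonPaths.push(segment);
  }
  return path.join(...commonPaths);
}

console.log(leastCommonAncestor(['/foo/bar/one', '/foo/voo/two'])); // '/foo'
console.log(leastCommonAncestor(['/foo/', '/bar/']));               // '/'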
globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions()); - const rawSearchResults = yield globber.glob(); - /* - Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten - Detect any files that could be overwritten for user awareness - */ - const set = new Set(); - /* - Directories will be rejected if attempted to be uploaded. This includes just empty - directories so filter any directories out from the raw search results - */ - for (const searchResult of rawSearchResults) { - const fileStats = yield stats(searchResult); - // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead - if (!fileStats.isDirectory()) { - core_1.debug(`File:${searchResult} was found using the provided searchPath`); - searchResults.push(searchResult); - // detect any files that would be overwritten because of case insensitivity - if (set.has(searchResult.toLowerCase())) { - core_1.info(`Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`); + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + let errorMessage = ''; + let customErrorInformation = undefined; + let attempt = 1; + while (attempt <= maxAttempts) { + try { + response = yield operation(); + statusCode = response.message.statusCode; + if (utils_1.isSuccessStatusCode(statusCode)) { + return response; } - else { - set.add(searchResult.toLowerCase()); + // Extra error information that we want to display if a particular response code is hit + if (statusCode) { + customErrorInformation = customErrorMessages.get(statusCode); } + isRetryable = utils_1.isRetryableStatusCode(statusCode); + errorMessage = `Artifact service responded with ${statusCode}`; } - else { - core_1.debug(`Removing ${searchResult} from rawSearchResults because it is a directory`); + catch (error) { + isRetryable = true; + errorMessage = error.message; } + if (!isRetryable) { + core.info(`${name} - Error is not retryable`); + if (response) { + utils_1.displayHttpDiagnostics(response); + } + break; + } + core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt)); + attempt++; } - // Calculate the root directory for the artifact using the search paths that were utilized - const searchPaths = globber.getSearchPaths(); - if (searchPaths.length > 1) { - core_1.info(`Multiple search paths detected. Calculating the least common ancestor of all paths`); - const lcaSearchPath = getMultiPathLCA(searchPaths); - core_1.info(`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`); - return { - filesToUpload: searchResults, - rootDirectory: lcaSearchPath - }; + if (response) { + utils_1.displayHttpDiagnostics(response); } - /* - Special case for a single file artifact that is uploaded without a directory or wildcard pattern. 
The directory structure is - not preserved and the root directory will be the single files parent directory - */ - if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) { - return { - filesToUpload: searchResults, - rootDirectory: path_1.dirname(searchResults[0]) - }; + if (customErrorInformation) { + throw Error(`${name} failed: ${customErrorInformation}`); } - return { - filesToUpload: searchResults, - rootDirectory: searchPaths[0] - }; + throw Error(`${name} failed: ${errorMessage}`); }); } -exports.findFilesToUpload = findFilesToUpload; +exports.retry = retry; +function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, customErrorMessages, maxAttempts); + }); +} +exports.retryHttpClientRequest = retryHttpClientRequest; +//# sourceMappingURL=requestUtils.js.map + +/***/ }), + +/***/ 691: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var wrappy = __webpack_require__(50) +var reqs = Object.create(null) +var once = __webpack_require__(731) + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. + cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} /***/ }), -/***/ 583: +/***/ 707: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); -const core = __importStar(__webpack_require__(470)); -const constants_1 = __webpack_require__(694); +exports.create = void 0; +const artifact_client_1 = __webpack_require__(390); /** - * Helper to get all the inputs for the action + * Constructs an ArtifactClient */ -function getInputs() { - const name = core.getInput(constants_1.Inputs.Name); - const path = core.getInput(constants_1.Inputs.Path, { required: true }); - const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound); - const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound]; - if (!noFileBehavior) { - core.setFailed(`Unrecognized ${constants_1.Inputs.IfNoFilesFound} input. Provided: ${ifNoFilesFound}. 
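The `retry`/`retryHttpClientRequest` pair added above is the part of the 1.1.0 bump used to wrap artifact HTTP requests. A condensed, self-contained sketch of the same loop; the helpers it normally imports from utils (success/retryable status checks, sleep, exponential backoff) are inlined here as assumptions, not the bundle's actual values:

// Inlined stand-ins for the utils helpers the real retry() imports; the
// exact retryable status list and backoff curve are assumptions here.
const isSuccessStatusCode = (code) => code >= 200 && code < 300;
const isRetryableStatusCode = (code) => [429, 502, 503, 504].includes(code);
const sleep = (ms) => new Promise(resolve => setTimeout(resolve, ms));
const backoffMs = (attempt) => 1000 * Math.pow(2, attempt);

async function retrySketch(name, operation, maxAttempts = 5) {
  let errorMessage = '';
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    let isRetryable = false;
    try {
      const response = await operation();
      const statusCode = response.message.statusCode;
      if (isSuccessStatusCode(statusCode)) return response;
      isRetryable = isRetryableStatusCode(statusCode);
      errorMessage = `Artifact service responded with ${statusCode}`;
    } catch (error) {
      isRetryable = true; // network-level failures are always retried
      errorMessage = error.message;
    }
    if (!isRetryable) break;
    await sleep(backoffMs(attempt));
  }
  throw new Error(`${name} failed: ${errorMessage}`);
}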
Available options: ${Object.keys(constants_1.NoFileOptions)}`); - } - const inputs = { - artifactName: name, - searchPath: path, - ifNoFilesFound: noFileBehavior - }; - const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays); - if (retentionDaysStr) { - inputs.retentionDays = parseInt(retentionDaysStr); - if (isNaN(inputs.retentionDays)) { - core.setFailed('Invalid retention-days'); - } - } - return inputs; +function create() { + return artifact_client_1.DefaultArtifactClient.create(); } -exports.getInputs = getInputs; +exports.create = create; +//# sourceMappingURL=artifact-client.js.map + +/***/ }), + +/***/ 719: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const { promisify } = __webpack_require__(669); +const tmp = __webpack_require__(26); + +// file +module.exports.fileSync = tmp.fileSync; +const fileWithOptions = promisify((options, cb) => + tmp.file(options, (err, path, fd, cleanup) => + err ? cb(err) : cb(undefined, { path, fd, cleanup: promisify(cleanup) }) + ) +); +module.exports.file = async (options) => fileWithOptions(options); + +module.exports.withFile = async function withFile(fn, options) { + const { path, fd, cleanup } = await module.exports.file(options); + try { + return await fn({ path, fd }); + } finally { + await cleanup(); + } +}; + + +// directory +module.exports.dirSync = tmp.dirSync; +const dirWithOptions = promisify((options, cb) => + tmp.dir(options, (err, path, cleanup) => + err ? cb(err) : cb(undefined, { path, cleanup: promisify(cleanup) }) + ) +); +module.exports.dir = async (options) => dirWithOptions(options); + +module.exports.withDir = async function withDir(fn, options) { + const { path, cleanup } = await module.exports.dir(options); + try { + return await fn({ path }); + } finally { + await cleanup(); + } +}; + + +// name generation +module.exports.tmpNameSync = tmp.tmpNameSync; +module.exports.tmpName = promisify(tmp.tmpName); + +module.exports.tmpdir = tmp.tmpdir; + +module.exports.setGracefulCleanup = tmp.setGracefulCleanup; /***/ }), -/***/ 590: +/***/ 722: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); -exports.getUploadSpecification = void 0; -const fs = __importStar(__webpack_require__(747)); -const core_1 = __webpack_require__(470); -const path_1 = __webpack_require__(622); -const path_and_artifact_name_validation_1 = __webpack_require__(553); +exports.StatusReporter = void 0; +const core_1 = __webpack_require__(676); /** - * Creates a specification that describes how each file that is part of the artifact will be uploaded - * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server - * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file - * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded + * + * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable + * The total status of the upload/download gets displayed according to this value + * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function */ -function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { - // artifact name was checked earlier on, no need to check again - const specifications = []; - if (!fs.existsSync(rootDirectory)) { - throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); +class StatusReporter { + constructor(displayFrequencyInMilliseconds) { + this.totalNumberOfFilesToProcess = 0; + this.processedCount = 0; + this.largeFiles = new Map(); + this.totalFileStatus = undefined; + this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; } - if (!fs.lstatSync(rootDirectory).isDirectory()) { - throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); + setTotalNumberOfFilesToProcess(fileTotal) { + this.totalNumberOfFilesToProcess = fileTotal; + this.processedCount = 0; } - // Normalize and resolve, this allows for either absolute or relative paths to be used - rootDirectory = path_1.normalize(rootDirectory); - rootDirectory = path_1.resolve(rootDirectory); - /* - Example to demonstrate behavior - - Input: - artifactName: my-artifact - rootDirectory: '/home/user/files/plz-upload' - artifactFiles: [ - '/home/user/files/plz-upload/file1.txt', - '/home/user/files/plz-upload/file2.txt', - '/home/user/files/plz-upload/dir/file3.txt' - ] - - Output: - specifications: [ - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] - ] - */ - for (let file of artifactFiles) { - if (!fs.existsSync(file)) { - throw new Error(`File ${file} does not exist`); - } - if (!fs.lstatSync(file).isDirectory()) { - // Normalize and resolve, this 
allows for either absolute or relative paths to be used - file = path_1.normalize(file); - file = path_1.resolve(file); - if (!file.startsWith(rootDirectory)) { - throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); - } - // Check for forbidden characters in file paths that will be rejected during upload - const uploadPath = file.replace(rootDirectory, ''); - path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath); - /* - uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all - be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts - - path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt - join('artifact-name/', 'file-to-upload.txt') - join('artifact-name/', '/file-to-upload.txt') - join('artifact-name', 'file-to-upload.txt') - join('artifact-name', '/file-to-upload.txt') - */ - specifications.push({ - absoluteFilePath: file, - uploadFilePath: path_1.join(artifactName, uploadPath) - }); - } - else { - // Directories are rejected by the server during upload - core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); - } + start() { + // displays information about the total upload/download status + this.totalFileStatus = setInterval(() => { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); + core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); + }, this.displayFrequencyInMilliseconds); } - return specifications; -} -exports.getUploadSpecification = getUploadSpecification; -//# sourceMappingURL=upload-specification.js.map - -/***/ }), - -/***/ 597: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const pathHelper = __webpack_require__(972); -const internal_match_kind_1 = __webpack_require__(327); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Given an array of patterns, returns an array of paths to search. - * Duplicates and paths under other included paths are filtered out. - */ -function getSearchPaths(patterns) { - // Ignore negate patterns - patterns = patterns.filter(x => !x.negate); - // Create a map of all search paths - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS - ? pattern.searchPath.toUpperCase() - : pattern.searchPath; - searchPathMap[key] = 'candidate'; + // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload + updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize); + core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); } - const result = []; - for (const pattern of patterns) { - // Check if already included - const key = IS_WINDOWS - ? 
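The interesting part of the (old) upload specification above is how a file's on-disk path becomes its path inside the artifact container: strip the root directory, then join the remainder onto the artifact name. A worked sketch with POSIX paths, reusing the example values from the doc comment:

const path = require('path');

const artifactName = 'my-artifact';
const rootDirectory = path.resolve('/home/user/files/plz-upload');
const file = path.resolve('/home/user/files/plz-upload/dir/file3.txt');

// Strip the root, then join onto the artifact name; this mirrors the
// absoluteFilePath/uploadFilePath pair built above.
const uploadPath = file.replace(rootDirectory, '');
console.log(path.join(artifactName, uploadPath)); // my-artifact/dir/file3.txt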
pattern.searchPath.toUpperCase() - : pattern.searchPath; - if (searchPathMap[key] === 'included') { - continue; - } - // Check for an ancestor search path - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; - } - tempKey = parent; - parent = pathHelper.dirname(tempKey); - } - // Include the search pattern in the result - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = 'included'; + stop() { + if (this.totalFileStatus) { + clearInterval(this.totalFileStatus); } } - return result; -} -exports.getSearchPaths = getSearchPaths; -/** - * Matches the patterns against the path - */ -function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); - } - else { - result |= pattern.match(itemPath); - } + incrementProcessedCount() { + this.processedCount++; + } + formatPercentage(numerator, denominator) { + // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed + return ((numerator / denominator) * 100).toFixed(4).toString(); } - return result; -} -exports.match = match; -/** - * Checks whether to descend further into the directory - */ -function partialMatch(patterns, itemPath) { - return patterns.some(x => !x.negate && x.partialMatch(itemPath)); } -exports.partialMatch = partialMatch; -//# sourceMappingURL=internal-pattern-helper.js.map +exports.StatusReporter = StatusReporter; +//# sourceMappingURL=status-reporter.js.map /***/ }), -/***/ 601: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; +/***/ 727: +/***/ (function(module, __unusedexports, __webpack_require__) { -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __webpack_require__(470); -/** - * Returns a copy with defaults filled in. - */ -function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - omitBrokenSymbolicLinks: true - }; - if (copy) { - if (typeof copy.followSymbolicLinks === 'boolean') { - result.followSymbolicLinks = copy.followSymbolicLinks; - core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === 'boolean') { - result.implicitDescendants = copy.implicitDescendants; - core.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); - } - } - return result; -} -exports.getOptions = getOptions; -//# sourceMappingURL=internal-glob-options-helper.js.map +module.exports = minimatch +minimatch.Minimatch = Minimatch -/***/ }), +var path = { sep: '/' } +try { + path = __webpack_require__(622) +} catch (er) {} -/***/ 605: -/***/ (function(module) { +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __webpack_require__(171) -module.exports = require("http"); +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' 
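One small detail in the `StatusReporter` added above worth calling out: the displayed percentage is truncated rather than rounded, which is why it goes through `toFixed(4)` plus a slice instead of `toFixed(1)`. A quick sketch of the difference:

// Same truncation trick as StatusReporter.formatPercentage above.
const formatPercentage = (numerator, denominator) =>
  ((numerator / denominator) * 100).toFixed(4).toString();

const percentage = formatPercentage(2, 3);                     // '66.6667'
console.log(percentage.slice(0, percentage.indexOf('.') + 2)); // '66.6' (truncated)
console.log(((2 / 3) * 100).toFixed(1));                       // '66.7' (rounded)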
}, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} -/***/ }), +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' -/***/ 608: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +// * => any number of characters +var star = qmark + '*?' -"use strict"; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.UploadHttpClient = void 0; -const fs = __importStar(__webpack_require__(747)); -const core = __importStar(__webpack_require__(470)); -const tmp = __importStar(__webpack_require__(875)); -const stream = __importStar(__webpack_require__(413)); -const utils_1 = __webpack_require__(870); -const config_variables_1 = __webpack_require__(401); -const util_1 = __webpack_require__(669); -const url_1 = __webpack_require__(835); -const perf_hooks_1 = __webpack_require__(630); -const status_reporter_1 = __webpack_require__(176); -const http_client_1 = __webpack_require__(539); -const http_manager_1 = __webpack_require__(452); -const upload_gzip_1 = __webpack_require__(647); -const requestUtils_1 = __webpack_require__(489); -const stat = util_1.promisify(fs.stat); -class UploadHttpClient { - constructor() { - this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); - this.statusReporter = new status_reporter_1.StatusReporter(10000); +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. 
+var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. 
+ set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} } - /** - * Creates a file container for the new artifact in the remote blob storage/file service - * @param {string} artifactName Name of the artifact being created - * @returns The response from the Artifact Service if the file container was successfully created - */ - createArtifactInFileContainer(artifactName, options) { - return __awaiter(this, void 0, void 0, function* () { - const parameters = { - Type: 'actions_storage', - Name: artifactName - }; - // calculate retention period - if (options && options.retentionDays) { - const maxRetentionStr = config_variables_1.getRetentionDays(); - parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr); - } - const data = JSON.stringify(parameters, null, 2); - const artifactUrl = utils_1.getArtifactUrl(); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.uploadHttpManager.getClient(0); - const headers = utils_1.getUploadHeaders('application/json', false); - // Extra information to display when a particular HTTP code is returned - // If a 403 is returned when trying to create a file container, the customer has exceeded - // their storage quota so no new artifact containers can be created - const customErrorMessages = new Map([ - [ - http_client_1.HttpCodes.Forbidden, - 'Artifact storage quota has been hit. Unable to upload any new artifacts' - ], - [ - http_client_1.HttpCodes.BadRequest, - `The artifact name ${artifactName} is not valid. 
Request URL ${artifactUrl}` - ] - ]); - const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages); - const body = yield response.readBody(); - return JSON.parse(body); - }); - } - /** - * Concurrently upload all of the files in chunks - * @param {string} uploadUrl Base Url for the artifact that was created - * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded - * @returns The size of all the files uploaded in bytes - */ - uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { - return __awaiter(this, void 0, void 0, function* () { - const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); - const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); - core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); - const parameters = []; - // by default, file uploads will continue if there is an error unless specified differently in the options - let continueOnError = true; - if (options) { - if (options.continueOnError === false) { - continueOnError = false; - } - } - // prepare the necessary parameters to upload all the files - for (const file of filesToUpload) { - const resourceUrl = new url_1.URL(uploadUrl); - resourceUrl.searchParams.append('itemPath', file.uploadFilePath); - parameters.push({ - file: file.absoluteFilePath, - resourceUrl: resourceUrl.toString(), - maxChunkSize: MAX_CHUNK_SIZE, - continueOnError - }); - } - const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; - const failedItemsToReport = []; - let currentFile = 0; - let completedFiles = 0; - let uploadFileSize = 0; - let totalFileSize = 0; - let abortPendingFileUploads = false; - this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length); - this.statusReporter.start(); - // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors - yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () { - while (currentFile < filesToUpload.length) { - const currentFileParameters = parameters[currentFile]; - currentFile += 1; - if (abortPendingFileUploads) { - failedItemsToReport.push(currentFileParameters.file); - continue; - } - const startTime = perf_hooks_1.performance.now(); - const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core.isDebug()) { - core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); - } - uploadFileSize += uploadFileResult.successfulUploadSize; - totalFileSize += uploadFileResult.totalSize; - if (uploadFileResult.isSuccess === false) { - failedItemsToReport.push(currentFileParameters.file); - if (!continueOnError) { - // fail fast - core.error(`aborting artifact upload`); - abortPendingFileUploads = true; - } - } - this.statusReporter.incrementProcessedCount(); - } - }))); - this.statusReporter.stop(); - // done uploading, safety dispose all connections - this.uploadHttpManager.disposeAndReplaceAllClients(); - core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); - return { - uploadSize: uploadFileSize, - totalSize: totalFileSize, - failedItems: failedItemsToReport - }; - }); - } - /** - * Asynchronously uploads a file. 
The file is compressed and uploaded using GZip if it is determined to save space. - * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls - * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls - * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded - * @returns The size of the file that was uploaded in bytes along with any failed uploads - */ - uploadFileAsync(httpClientIndex, parameters) { - return __awaiter(this, void 0, void 0, function* () { - const fileStat = yield stat(parameters.file); - const totalFileSize = fileStat.size; - const isFIFO = fileStat.isFIFO(); - let offset = 0; - let isUploadSuccessful = true; - let failedChunkSizes = 0; - let uploadFileSize = 0; - let isGzip = true; - // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O - // for creating a new GZip file, an in-memory buffer is used for compression - // with named pipes the file size is reported as zero in that case don't read the file in memory - if (!isFIFO && totalFileSize < 65536) { - core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); - const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); - // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, - // it will not properly get reset to the start of the stream if a chunk upload needs to be retried - let openUploadStream; - if (totalFileSize < buffer.byteLength) { - // compression did not help with reducing the size, use a readable stream from the original file for upload - core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); - openUploadStream = () => fs.createReadStream(parameters.file); - isGzip = false; - uploadFileSize = totalFileSize; - } - else { - // create a readable stream using a PassThrough stream that is both readable and writable - core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); - openUploadStream = () => { - const passThrough = new stream.PassThrough(); - passThrough.end(buffer); - return passThrough; - }; - uploadFileSize = buffer.byteLength; - } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize); - if (!result) { - // chunk failed to upload - isUploadSuccessful = false; - failedChunkSizes += uploadFileSize; - core.warning(`Aborting upload for ${parameters.file} due to failure`); - } - return { - isSuccess: isUploadSuccessful, - successfulUploadSize: uploadFileSize - failedChunkSizes, - totalSize: totalFileSize - }; - } - else { - // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the - // npm tmp-promise package and this file gets used to create a GZipped file - const tempFile = yield tmp.file(); - core.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); - // create a GZip file of the original file being uploaded, the original file should not be modified in any way - uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); - let uploadFilePath = tempFile.path; - // compression did not help with size reduction, use the original file for upload and delete the temp GZip file - // for named pipes totalFileSize is zero, this assumes compression did help - if (!isFIFO && totalFileSize < uploadFileSize) { - core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); - uploadFileSize = totalFileSize; - uploadFilePath = parameters.file; - isGzip = false; - } - else { - core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); - } - let abortFileUpload = false; - // upload only a single chunk at a time - while (offset < uploadFileSize) { - const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); - const startChunkIndex = offset; - const endChunkIndex = offset + chunkSize - 1; - offset += parameters.maxChunkSize; - if (abortFileUpload) { - // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed - failedChunkSizes += chunkSize; - continue; - } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { - start: startChunkIndex, - end: endChunkIndex, - autoClose: false - }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize); - if (!result) { - // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was - // successfully uploaded so the server may report a different size for what was uploaded - isUploadSuccessful = false; - failedChunkSizes += chunkSize; - core.warning(`Aborting upload for ${parameters.file} due to failure`); - abortFileUpload = true; - } - else { - // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status - if (uploadFileSize > 8388608) { - this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize); - } - } - } - // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by - // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about - core.debug(`deleting temporary gzip file ${tempFile.path}`); - yield tempFile.cleanup(); - return { - isSuccess: isUploadSuccessful, - successfulUploadSize: uploadFileSize - failedChunkSizes, - totalSize: totalFileSize - }; - } - }); - } - /** - * Uploads a chunk of an individual file to the specified resourceUrl. 
If the upload fails and the status code - * indicates a retryable status, we try to upload the chunk as well - * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls - * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to - * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded - * @param {number} start Starting byte index of file that the chunk belongs to - * @param {number} end Ending byte index of file that the chunk belongs to - * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded - * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream - * @param {number} totalFileSize Original total size of the file that is being uploaded - * @returns if the chunk was successfully uploaded - */ - uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { - return __awaiter(this, void 0, void 0, function* () { - // prepare all the necessary headers before making any http call - const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize)); - const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - const client = this.uploadHttpManager.getClient(httpClientIndex); - return yield client.sendStream('PUT', resourceUrl, openStream(), headers); - }); - let retryCount = 0; - const retryLimit = config_variables_1.getRetryLimit(); - // Increments the current retry count and then checks if the retry limit has been reached - // If there have been too many retries, fail so the download stops - const incrementAndCheckRetryLimit = (response) => { - retryCount++; - if (retryCount > retryLimit) { - if (response) { - utils_1.displayHttpDiagnostics(response); - } - core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); - return true; - } - return false; - }; - const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { - this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); - if (retryAfterValue) { - core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); - yield utils_1.sleep(retryAfterValue); - } - else { - const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); - core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); - yield utils_1.sleep(backoffTime); - } - core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); - return; - }); - // allow for failed chunks to be retried multiple times - while (retryCount <= retryLimit) { - let response; - try { - response = yield uploadChunkRequest(); - } - catch (error) { - // if an error is caught, it is usually indicative of a timeout so retry the upload - core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); - // eslint-disable-next-line no-console - console.log(error); - if (incrementAndCheckRetryLimit()) { - return false; - } - yield backOff(); - continue; - } - // Always read the body of the response. 
There is potential for a resource leak if the body is not read which will - // result in the connection remaining open along with unintended consequences when trying to dispose of the client - yield response.readBody(); - if (utils_1.isSuccessStatusCode(response.message.statusCode)) { - return true; - } - else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { - core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); - if (incrementAndCheckRetryLimit(response)) { - return false; - } - utils_1.isThrottledStatusCode(response.message.statusCode) - ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) - : yield backOff(); - } - else { - core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); - utils_1.displayHttpDiagnostics(response); - return false; - } - } - return false; - }); - } - /** - * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. - * Updating the size indicates that we are done uploading all the contents of the artifact - */ - patchArtifactSize(size, artifactName) { - return __awaiter(this, void 0, void 0, function* () { - const resourceUrl = new url_1.URL(utils_1.getArtifactUrl()); - resourceUrl.searchParams.append('artifactName', artifactName); - const parameters = { Size: size }; - const data = JSON.stringify(parameters, null, 2); - core.debug(`URL is ${resourceUrl.toString()}`); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.uploadHttpManager.getClient(0); - const headers = utils_1.getUploadHeaders('application/json', false); - // Extra information to display when a particular HTTP code is returned - const customErrorMessages = new Map([ - [ - http_client_1.HttpCodes.NotFound, - `An Artifact with the name ${artifactName} was not found` - ] - ]); - // TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this - const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); - yield response.readBody(); - core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); - }); + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
+Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false } -} -exports.UploadHttpClient = UploadHttpClient; -//# sourceMappingURL=upload-http-client.js.map + } -/***/ }), + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) -/***/ 614: -/***/ (function(module) { + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } -module.exports = require("events"); + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false -/***/ }), + case '\\': + clearStateChar() + escaping = true + continue -/***/ 621: -/***/ (function(module) { + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) -"use strict"; + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue - var r = range(a, b, str); + case '(': + if (inClass) { + re += '(' + continue + } - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} + if (!stateChar) { + re += '\\(' + continue + } -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? 
m[0] : null; -} + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. 
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } - if (ai >= 0 && bi > 0) { - begs = []; - left = str.length; + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] - bi = str.indexOf(b, i + 1); - } + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) - i = ai < bi && ai >= 0 ? ai : bi; + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') } + nlAfter = cleanAfter - if (begs.length) { - result = [ left, right ]; + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe } - return result; -} - - -/***/ }), - -/***/ 622: -/***/ (function(module) { + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } -module.exports = require("path"); + if (addPatternStart) { + re = patternStart + re + } -/***/ }), + // parsing just a piece of a larger pattern. 
+ if (isSub === SUBPARSE) { + return [re, hasMagic] + } -/***/ 630: -/***/ (function(module) { + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } -module.exports = require("perf_hooks"); + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } -/***/ }), + regExp._glob = pattern + regExp._src = re -/***/ 631: -/***/ (function(module) { + return regExp +} -module.exports = require("net"); +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} -/***/ }), +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp -/***/ 647: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set -"use strict"; + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0; -const fs = __importStar(__webpack_require__(747)); -const zlib = __importStar(__webpack_require__(761)); -const util_1 = __webpack_require__(669); -const stat = util_1.promisify(fs.stat); -/** - * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip - * files then will just waste a lot of time before ultimately being discarded (especially for very large files). - * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is - */ -const gzipExemptFileExtensions = [ - '.gzip', - '.zip', - '.tar.lz', - '.tar.gz', - '.tar.bz2', - '.7z' -]; -/** - * Creates a Gzip compressed file of an original file at the provided temporary filepath location - * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified - * @param {string} tempFilePath the location of where the Gzip file will be created - * @returns the size of gzip file that gets created - */ -function createGZipFileOnDisk(originalFilePath, tempFilePath) { - return __awaiter(this, void 0, void 0, function* () { - for (const gzipExemptExtension of gzipExemptFileExtensions) { - if (originalFilePath.endsWith(gzipExemptExtension)) { - // return a really large number so that the original file gets uploaded - return Number.MAX_SAFE_INTEGER; - } - } - return new Promise((resolve, reject) => { - const inputStream = fs.createReadStream(originalFilePath); - const gzip = zlib.createGzip(); - const outputStream = fs.createWriteStream(tempFilePath); - inputStream.pipe(gzip).pipe(outputStream); - outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () { - // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload - const size = (yield stat(tempFilePath)).size; - resolve(size); - })); - outputStream.on('error', error => { - // eslint-disable-next-line no-console - console.log(error); - reject; - }); - }); - }); -} -exports.createGZipFileOnDisk = createGZipFileOnDisk; -/** - * Creates a GZip file in memory using a buffer. 
Should be used for smaller files to reduce disk I/O - * @param originalFilePath the path to the original file that is being GZipped - * @returns a buffer with the GZip file - */ -function createGZipFileInBuffer(originalFilePath) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - var e_1, _a; - const inputStream = fs.createReadStream(originalFilePath); - const gzip = zlib.createGzip(); - inputStream.pipe(gzip); - // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 - const chunks = []; - try { - for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { - const chunk = gzip_1_1.value; - chunks.push(chunk); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); - } - finally { if (e_1) throw e_1.error; } - } - resolve(Buffer.concat(chunks)); - })); - }); + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp } -exports.createGZipFileInBuffer = createGZipFileInBuffer; -//# sourceMappingURL=upload-gzip.js.map -/***/ }), +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} -/***/ 669: -/***/ (function(module) { +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' -module.exports = require("util"); + if (f === '/' && partial) return true -/***/ }), + var options = this.options -/***/ 674: -/***/ (function(module, __unusedexports, __webpack_require__) { + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } -var wrappy = __webpack_require__(11) -var reqs = Object.create(null) -var once = __webpack_require__(49) + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) -module.exports = wrappy(inflight) + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. 
-function inflight (key, cb) { - if (reqs[key]) { - reqs[key].push(cb) - return null - } else { - reqs[key] = [cb] - return makeres(key) + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate } -function makeres (key) { - return once(function RES () { - var cbs = reqs[key] - var len = cbs.length - var args = slice(arguments) +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' 
|| + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } - // XXX It's somewhat ambiguous whether a new callback added in this - // pass should be queued for later execution if something in the - // list of callbacks throws, or if it should just be discarded. - // However, it's such an edge case that it hardly matters, and either - // choice is likely as surprising as the other. - // As it happens, we do go ahead and schedule it for later execution. - try { - for (var i = 0; i < len; i++) { - cbs[i].apply(null, args) - } - } finally { - if (cbs.length > len) { - // added more in the interim. - // de-zalgo, just in case, but don't call again. - cbs.splice(0, len) - process.nextTick(function () { - RES.apply(null, args) - }) - } else { - delete reqs[key] - } - } - }) + // should be unreachable. 
+ throw new Error('wtf?') } -function slice (args) { - var length = args.length - var array = [] +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} - for (var i = 0; i < length; i++) array[i] = args[i] - return array +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') } /***/ }), -/***/ 681: -/***/ (function(module) { - -"use strict"; +/***/ 731: +/***/ (function(module, __unusedexports, __webpack_require__) { +var wrappy = __webpack_require__(50) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) -function posix(path) { - return path.charAt(0) === '/'; -} +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) -function win32(path) { - // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 - var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - var result = splitDeviceRe.exec(path); - var device = result[1] || ''; - var isUnc = Boolean(device && device.charAt(1) !== ':'); + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) - // UNC paths are always absolute - return Boolean(result[2] || isUnc); +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f } -module.exports = process.platform === 'win32' ? win32 : posix; -module.exports.posix = posix; -module.exports.win32 = win32; +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} /***/ }), -/***/ 689: -/***/ (function(module, __unusedexports, __webpack_require__) { - -try { - var util = __webpack_require__(669); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __webpack_require__(315); -} +/***/ 747: +/***/ (function(module) { +module.exports = require("fs"); /***/ }), -/***/ 694: +/***/ 750: /***/ (function(__unusedmodule, exports) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -var Inputs; -(function (Inputs) { - Inputs["Name"] = "name"; - Inputs["Path"] = "path"; - Inputs["IfNoFilesFound"] = "if-no-files-found"; - Inputs["RetentionDays"] = "retention-days"; -})(Inputs = exports.Inputs || (exports.Inputs = {})); -var NoFileOptions; -(function (NoFileOptions) { - /** - * Default. 
Output a warning but do not fail the action - */ - NoFileOptions["warn"] = "warn"; - /** - * Fail the action with an error message - */ - NoFileOptions["error"] = "error"; - /** - * Do not output any warnings or errors, the action does not fail - */ - NoFileOptions["ignore"] = "ignore"; -})(NoFileOptions = exports.NoFileOptions || (exports.NoFileOptions = {})); +exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0; +// The number of concurrent uploads that happens at the same time +function getUploadFileConcurrency() { + return 2; +} +exports.getUploadFileConcurrency = getUploadFileConcurrency; +// When uploading large files that can't be uploaded with a single http call, this controls +// the chunk size that is used during upload +function getUploadChunkSize() { + return 8 * 1024 * 1024; // 8 MB Chunks +} +exports.getUploadChunkSize = getUploadChunkSize; +// The maximum number of retries that can be attempted before an upload or download fails +function getRetryLimit() { + return 5; +} +exports.getRetryLimit = getRetryLimit; +// With exponential backoff, the larger the retry count, the larger the wait time before another attempt +// The retry multiplier controls by how much the backOff time increases depending on the number of retries +function getRetryMultiplier() { + return 1.5; +} +exports.getRetryMultiplier = getRetryMultiplier; +// The initial wait time if an upload or download fails and a retry is being attempted for the first time +function getInitialRetryIntervalInMilliseconds() { + return 3000; +} +exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds; +// The number of concurrent downloads that happens at the same time +function getDownloadFileConcurrency() { + return 2; +} +exports.getDownloadFileConcurrency = getDownloadFileConcurrency; +function getRuntimeToken() { + const token = process.env['ACTIONS_RUNTIME_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); + } + return token; +} +exports.getRuntimeToken = getRuntimeToken; +function getRuntimeUrl() { + const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); + } + return runtimeUrl; +} +exports.getRuntimeUrl = getRuntimeUrl; +function getWorkFlowRunId() { + const workFlowRunId = process.env['GITHUB_RUN_ID']; + if (!workFlowRunId) { + throw new Error('Unable to get GITHUB_RUN_ID env variable'); + } + return workFlowRunId; +} +exports.getWorkFlowRunId = getWorkFlowRunId; +function getWorkSpaceDirectory() { + const workspaceDirectory = process.env['GITHUB_WORKSPACE']; + if (!workspaceDirectory) { + throw new Error('Unable to get GITHUB_WORKSPACE env variable'); + } + return workspaceDirectory; +} +exports.getWorkSpaceDirectory = getWorkSpaceDirectory; +function getRetentionDays() { + return process.env['GITHUB_RETENTION_DAYS']; +} +exports.getRetentionDays = getRetentionDays; +//# sourceMappingURL=config-variables.js.map + +/***/ }), + +/***/ 761: +/***/ (function(module) { +module.exports = require("zlib"); /***/ }), -/***/ 728: -/***/ (function(__unusedmodule, exports) { +/***/ 799: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +var __awaiter = 
(this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; Object.defineProperty(exports, "__esModule", { value: true }); -class SearchState { - constructor(path, level) { - this.path = path; - this.level = level; - } +const core = __importStar(__webpack_require__(676)); +const artifact_1 = __webpack_require__(707); +const search_1 = __webpack_require__(923); +const input_helper_1 = __webpack_require__(628); +const constants_1 = __webpack_require__(858); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const inputs = input_helper_1.getInputs(); + const searchResult = yield search_1.findFilesToUpload(inputs.searchPath); + if (searchResult.filesToUpload.length === 0) { + // No files were found, different use cases warrant different types of behavior if nothing is found + switch (inputs.ifNoFilesFound) { + case constants_1.NoFileOptions.warn: { + core.warning(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); + break; + } + case constants_1.NoFileOptions.error: { + core.setFailed(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); + break; + } + case constants_1.NoFileOptions.ignore: { + core.info(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); + break; + } + } + } + else { + const s = searchResult.filesToUpload.length === 1 ? '' : 's'; + core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`); + core.debug(`Root artifact directory is ${searchResult.rootDirectory}`); + if (searchResult.filesToUpload.length > 10000) { + core.warning(`There are over 10,000 files in this artifact, consider creating an archive before upload to improve the upload performance.`); + } + const artifactClient = artifact_1.create(); + const options = { + continueOnError: false + }; + if (inputs.retentionDays) { + options.retentionDays = inputs.retentionDays; + } + const uploadResponse = yield artifactClient.uploadArtifact(inputs.artifactName, searchResult.filesToUpload, searchResult.rootDirectory, options); + if (uploadResponse.failedItems.length > 0) { + core.setFailed(`An error was encountered when uploading ${uploadResponse.artifactName}. 
There were ${uploadResponse.failedItems.length} items that failed to upload.`); + } + else { + core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`); + } + } + } + catch (err) { + core.setFailed(err.message); + } + }); } -exports.SearchState = SearchState; -//# sourceMappingURL=internal-search-state.js.map - -/***/ }), - -/***/ 747: -/***/ (function(module) { - -module.exports = require("fs"); - -/***/ }), - -/***/ 761: -/***/ (function(module) { - -module.exports = require("zlib"); - -/***/ }), - -/***/ 835: -/***/ (function(module) { +run(); -module.exports = require("url"); /***/ }), -/***/ 855: +/***/ 800: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -7819,1350 +8233,1374 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.DownloadHttpClient = void 0; +exports.UploadHttpClient = void 0; const fs = __importStar(__webpack_require__(747)); -const core = __importStar(__webpack_require__(470)); -const zlib = __importStar(__webpack_require__(761)); -const utils_1 = __webpack_require__(870); +const core = __importStar(__webpack_require__(676)); +const tmp = __importStar(__webpack_require__(719)); +const stream = __importStar(__webpack_require__(413)); +const utils_1 = __webpack_require__(128); +const config_variables_1 = __webpack_require__(750); +const util_1 = __webpack_require__(669); const url_1 = __webpack_require__(835); -const status_reporter_1 = __webpack_require__(176); const perf_hooks_1 = __webpack_require__(630); -const http_manager_1 = __webpack_require__(452); -const config_variables_1 = __webpack_require__(401); -const requestUtils_1 = __webpack_require__(489); -class DownloadHttpClient { +const status_reporter_1 = __webpack_require__(722); +const http_client_1 = __webpack_require__(582); +const http_manager_1 = __webpack_require__(851); +const upload_gzip_1 = __webpack_require__(139); +const requestUtils_1 = __webpack_require__(682); +const stat = util_1.promisify(fs.stat); +class UploadHttpClient { constructor() { - this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download'); - // downloads are usually significantly faster than uploads so display status information every second - this.statusReporter = new status_reporter_1.StatusReporter(1000); + this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); + this.statusReporter = new status_reporter_1.StatusReporter(10000); } /** - * Gets a list of all artifacts that are in a specific container + * Creates a file container for the new artifact in the remote blob storage/file service + * @param {string} artifactName Name of the artifact being created + * @returns The response from the Artifact Service if the file container was successfully created */ - listArtifacts() { + createArtifactInFileContainer(artifactName, options) { return __awaiter(this, void 0, void 0, function* () { + const parameters = { + Type: 'actions_storage', + Name: artifactName + }; + // calculate retention period + if (options && options.retentionDays) { + const maxRetentionStr = config_variables_1.getRetentionDays(); + parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr); + } + const data = JSON.stringify(parameters, null, 2); const artifactUrl = utils_1.getArtifactUrl(); // use the first 
client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.downloadHttpManager.getClient(0); - const headers = utils_1.getDownloadHeaders('application/json'); - const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); })); - const body = yield response.readBody(); - return JSON.parse(body); - }); - } - /** - * Fetches a set of container items that describe the contents of an artifact - * @param artifactName the name of the artifact - * @param containerUrl the artifact container URL for the run - */ - getContainerItems(artifactName, containerUrl) { - return __awaiter(this, void 0, void 0, function* () { - // the itemPath search parameter controls which containers will be returned - const resourceUrl = new url_1.URL(containerUrl); - resourceUrl.searchParams.append('itemPath', artifactName); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.downloadHttpManager.getClient(0); - const headers = utils_1.getDownloadHeaders('application/json'); - const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); })); + const client = this.uploadHttpManager.getClient(0); + const headers = utils_1.getUploadHeaders('application/json', false); + // Extra information to display when a particular HTTP code is returned + // If a 403 is returned when trying to create a file container, the customer has exceeded + // their storage quota so no new artifact containers can be created + const customErrorMessages = new Map([ + [ + http_client_1.HttpCodes.Forbidden, + 'Artifact storage quota has been hit. Unable to upload any new artifacts' + ], + [ + http_client_1.HttpCodes.BadRequest, + `The artifact name ${artifactName} is not valid. 
Request URL ${artifactUrl}` + ] + ]); + const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages); const body = yield response.readBody(); return JSON.parse(body); }); } /** - * Concurrently downloads all the files that are part of an artifact - * @param downloadItems information about what items to download and where to save them + * Concurrently upload all of the files in chunks + * @param {string} uploadUrl Base Url for the artifact that was created + * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded + * @returns The size of all the files uploaded in bytes */ - downloadSingleArtifact(downloadItems) { + uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { return __awaiter(this, void 0, void 0, function* () { - const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency(); - // limit the number of files downloaded at a single time - core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); - const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; + const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); + const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); + core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + const parameters = []; + // by default, file uploads will continue if there is an error unless specified differently in the options + let continueOnError = true; + if (options) { + if (options.continueOnError === false) { + continueOnError = false; + } + } + // prepare the necessary parameters to upload all the files + for (const file of filesToUpload) { + const resourceUrl = new url_1.URL(uploadUrl); + resourceUrl.searchParams.append('itemPath', file.uploadFilePath); + parameters.push({ + file: file.absoluteFilePath, + resourceUrl: resourceUrl.toString(), + maxChunkSize: MAX_CHUNK_SIZE, + continueOnError + }); + } + const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; + const failedItemsToReport = []; let currentFile = 0; - let downloadedFiles = 0; - core.info(`Total number of files that will be downloaded: ${downloadItems.length}`); - this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); + let completedFiles = 0; + let uploadFileSize = 0; + let totalFileSize = 0; + let abortPendingFileUploads = false; + this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length); this.statusReporter.start(); - yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () { - while (currentFile < downloadItems.length) { - const currentFileToDownload = downloadItems[currentFile]; + // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors + yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () { + while (currentFile < filesToUpload.length) { + const currentFileParameters = parameters[currentFile]; currentFile += 1; + if (abortPendingFileUploads) { + failedItemsToReport.push(currentFileParameters.file); + continue; + } const startTime = perf_hooks_1.performance.now(); - yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); + const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); if (core.isDebug()) { - core.debug(`File: 
${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); + core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + } + uploadFileSize += uploadFileResult.successfulUploadSize; + totalFileSize += uploadFileResult.totalSize; + if (uploadFileResult.isSuccess === false) { + failedItemsToReport.push(currentFileParameters.file); + if (!continueOnError) { + // fail fast + core.error(`aborting artifact upload`); + abortPendingFileUploads = true; + } + } + this.statusReporter.incrementProcessedCount(); + } + }))); + this.statusReporter.stop(); + // done uploading, safety dispose all connections + this.uploadHttpManager.disposeAndReplaceAllClients(); + core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + return { + uploadSize: uploadFileSize, + totalSize: totalFileSize, + failedItems: failedItemsToReport + }; + }); + } + /** + * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space. + * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls + * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls + * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded + * @returns The size of the file that was uploaded in bytes along with any failed uploads + */ + uploadFileAsync(httpClientIndex, parameters) { + return __awaiter(this, void 0, void 0, function* () { + const fileStat = yield stat(parameters.file); + const totalFileSize = fileStat.size; + const isFIFO = fileStat.isFIFO(); + let offset = 0; + let isUploadSuccessful = true; + let failedChunkSizes = 0; + let uploadFileSize = 0; + let isGzip = true; + // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O + // for creating a new GZip file, an in-memory buffer is used for compression + // with named pipes the file size is reported as zero in that case don't read the file in memory + if (!isFIFO && totalFileSize < 65536) { + core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); + // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, + // it will not properly get reset to the start of the stream if a chunk upload needs to be retried + let openUploadStream; + if (totalFileSize < buffer.byteLength) { + // compression did not help with reducing the size, use a readable stream from the original file for upload + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + openUploadStream = () => fs.createReadStream(parameters.file); + isGzip = false; + uploadFileSize = totalFileSize; + } + else { + // create a readable stream using a PassThrough stream that is both readable and writable + core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. 
The file will be uploaded using gzip.`); + openUploadStream = () => { + const passThrough = new stream.PassThrough(); + passThrough.end(buffer); + return passThrough; + }; + uploadFileSize = buffer.byteLength; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // chunk failed to upload + isUploadSuccessful = false; + failedChunkSizes += uploadFileSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + } + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } + else { + // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the + // npm tmp-promise package and this file gets used to create a GZipped file + const tempFile = yield tmp.file(); + core.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + // create a GZip file of the original file being uploaded, the original file should not be modified in any way + uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); + let uploadFilePath = tempFile.path; + // compression did not help with size reduction, use the original file for upload and delete the temp GZip file + // for named pipes totalFileSize is zero, this assumes compression did help + if (!isFIFO && totalFileSize < uploadFileSize) { + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + uploadFileSize = totalFileSize; + uploadFilePath = parameters.file; + isGzip = false; + } + else { + core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + } + let abortFileUpload = false; + // upload only a single chunk at a time + while (offset < uploadFileSize) { + const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); + const startChunkIndex = offset; + const endChunkIndex = offset + chunkSize - 1; + offset += parameters.maxChunkSize; + if (abortFileUpload) { + // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed + failedChunkSizes += chunkSize; + continue; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { + start: startChunkIndex, + end: endChunkIndex, + autoClose: false + }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. 
It is possible that part of a chunk was + // successfully uploaded so the server may report a different size for what was uploaded + isUploadSuccessful = false; + failedChunkSizes += chunkSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + abortFileUpload = true; + } + else { + // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status + if (uploadFileSize > 8388608) { + this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize); + } } - this.statusReporter.incrementProcessedCount(); } - }))) - .catch(error => { - throw new Error(`Unable to download the artifact: ${error}`); - }) - .finally(() => { - this.statusReporter.stop(); - // safety dispose all connections - this.downloadHttpManager.disposeAndReplaceAllClients(); - }); + // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by + // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about + core.debug(`deleting temporary gzip file ${tempFile.path}`); + yield tempFile.cleanup(); + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } }); } /** - * Downloads an individual file - * @param httpClientIndex the index of the http client that is used to make all of the calls - * @param artifactLocation origin location where a file will be downloaded from - * @param downloadPath destination location for the file being downloaded + * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code + * indicates a retryable status, we try to upload the chunk as well + * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls + * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to + * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded + * @param {number} start Starting byte index of file that the chunk belongs to + * @param {number} end Ending byte index of file that the chunk belongs to + * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded + * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream + * @param {number} totalFileSize Original total size of the file that is being uploaded + * @returns if the chunk was successfully uploaded */ - downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) { + uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { return __awaiter(this, void 0, void 0, function* () { + // open a new stream and read it to compute the digest + const digest = yield utils_1.digestForStream(openStream()); + // prepare all the necessary headers before making any http call + const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize), digest); + const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { + const client = this.uploadHttpManager.getClient(httpClientIndex); + return yield client.sendStream('PUT', resourceUrl, openStream(), headers); + }); let retryCount = 0; const retryLimit = config_variables_1.getRetryLimit(); - let destinationStream = 
fs.createWriteStream(downloadPath); - const headers = utils_1.getDownloadHeaders('application/json', true, true); - // a single GET request is used to download a file - const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () { - const client = this.downloadHttpManager.getClient(httpClientIndex); - return yield client.get(artifactLocation, headers); - }); - // check the response headers to determine if the file was compressed using gzip - const isGzip = (incomingHeaders) => { - return ('content-encoding' in incomingHeaders && - incomingHeaders['content-encoding'] === 'gzip'); - }; // Increments the current retry count and then checks if the retry limit has been reached - // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided, - // it will be used - const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { + // If there have been too many retries, fail so the download stops + const incrementAndCheckRetryLimit = (response) => { retryCount++; if (retryCount > retryLimit) { - return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`)); - } - else { - this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); - if (retryAfterValue) { - // Back off by waiting the specified time denoted by the retry-after header - core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); - yield utils_1.sleep(retryAfterValue); - } - else { - // Back off using an exponential value that depends on the retry count - const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); - core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); - yield utils_1.sleep(backoffTime); + if (response) { + utils_1.displayHttpDiagnostics(response); } - core.info(`Finished backoff for retry #${retryCount}, continuing with download`); - } - }); - const isAllBytesReceived = (expected, received) => { - // be lenient, if any input is missing, assume success, i.e. not truncated - if (!expected || - !received || - process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) { - core.info('Skipping download validation.'); + core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; } - return parseInt(expected) === received; + return false; }; - const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () { - destinationStream.close(); - yield utils_1.rmFile(fileDownloadPath); - destinationStream = fs.createWriteStream(fileDownloadPath); + const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { + this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); + if (retryAfterValue) { + core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + yield utils_1.sleep(retryAfterValue); + } + else { + const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + core.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + yield utils_1.sleep(backoffTime); + } + core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + return; }); - // keep trying to download a file until a retry limit has been reached + // allow for failed chunks to be retried multiple times while (retryCount <= retryLimit) { let response; try { - response = yield makeDownloadRequest(); + response = yield uploadChunkRequest(); } catch (error) { - // if an error is caught, it is usually indicative of a timeout so retry the download - core.info('An error occurred while attempting to download a file'); + // if an error is caught, it is usually indicative of a timeout so retry the upload + core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); // eslint-disable-next-line no-console console.log(error); - // increment the retryCount and use exponential backoff to wait before making the next request + if (incrementAndCheckRetryLimit()) { + return false; + } yield backOff(); continue; } - let forceRetry = false; + // Always read the body of the response. There is potential for a resource leak if the body is not read which will + // result in the connection remaining open along with unintended consequences when trying to dispose of the client + yield response.readBody(); if (utils_1.isSuccessStatusCode(response.message.statusCode)) { - // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string - // which can cause some gzip encoded data to be lost - // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents - try { - const isGzipped = isGzip(response.message.headers); - yield this.pipeResponseToFile(response, destinationStream, isGzipped); - if (isGzipped || - isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) { - return; - } - else { - forceRetry = true; - } - } - catch (error) { - // retry on error, most likely streams were corrupted - forceRetry = true; - } + return true; } - if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) { - core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); - resetDestinationStream(downloadPath); - // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff + else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { + core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + if (incrementAndCheckRetryLimit(response)) { + return false; + } utils_1.isThrottledStatusCode(response.message.statusCode) ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) : yield backOff(); } else { - // Some unexpected response code, fail immediately and stop the download + core.error(`Unexpected response. 
Unable to upload chunk to ${resourceUrl}`); utils_1.displayHttpDiagnostics(response); - return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`)); + return false; } } + return false; }); } /** - * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary - * @param response the http response received when downloading a file - * @param destinationStream the stream where the file should be written to - * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it + * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. + * Updating the size indicates that we are done uploading all the contents of the artifact */ - pipeResponseToFile(response, destinationStream, isGzip) { + patchArtifactSize(size, artifactName) { return __awaiter(this, void 0, void 0, function* () { - yield new Promise((resolve, reject) => { - if (isGzip) { - const gunzip = zlib.createGunzip(); - response.message - .on('error', error => { - core.error(`An error occurred while attempting to read the response stream`); - gunzip.close(); - destinationStream.close(); - reject(error); - }) - .pipe(gunzip) - .on('error', error => { - core.error(`An error occurred while attempting to decompress the response stream`); - destinationStream.close(); - reject(error); - }) - .pipe(destinationStream) - .on('close', () => { - resolve(); - }) - .on('error', error => { - core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); - reject(error); - }); - } - else { - response.message - .on('error', error => { - core.error(`An error occurred while attempting to read the response stream`); - destinationStream.close(); - reject(error); - }) - .pipe(destinationStream) - .on('close', () => { - resolve(); - }) - .on('error', error => { - core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); - reject(error); - }); - } - }); - return; + const resourceUrl = new url_1.URL(utils_1.getArtifactUrl()); + resourceUrl.searchParams.append('artifactName', artifactName); + const parameters = { Size: size }; + const data = JSON.stringify(parameters, null, 2); + core.debug(`URL is ${resourceUrl.toString()}`); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.uploadHttpManager.getClient(0); + const headers = utils_1.getUploadHeaders('application/json', false); + // Extra information to display when a particular HTTP code is returned + const customErrorMessages = new Map([ + [ + http_client_1.HttpCodes.NotFound, + `An Artifact with the name ${artifactName} was not found` + ] + ]); + // TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this + const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); + yield response.readBody(); + core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); } } -exports.DownloadHttpClient = DownloadHttpClient; -//# sourceMappingURL=download-http-client.js.map +exports.UploadHttpClient = UploadHttpClient; +//# 
sourceMappingURL=upload-http-client.js.map /***/ }), -/***/ 856: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -exports.alphasort = alphasort -exports.alphasorti = alphasorti -exports.setopts = setopts -exports.ownProp = ownProp -exports.makeAbs = makeAbs -exports.finish = finish -exports.mark = mark -exports.isIgnored = isIgnored -exports.childrenIgnored = childrenIgnored - -function ownProp (obj, field) { - return Object.prototype.hasOwnProperty.call(obj, field) -} - -var path = __webpack_require__(622) -var minimatch = __webpack_require__(93) -var isAbsolute = __webpack_require__(681) -var Minimatch = minimatch.Minimatch - -function alphasorti (a, b) { - return a.toLowerCase().localeCompare(b.toLowerCase()) -} - -function alphasort (a, b) { - return a.localeCompare(b) -} - -function setupIgnores (self, options) { - self.ignore = options.ignore || [] - - if (!Array.isArray(self.ignore)) - self.ignore = [self.ignore] +/***/ 835: +/***/ (function(module) { - if (self.ignore.length) { - self.ignore = self.ignore.map(ignoreMap) - } -} +module.exports = require("url"); -// ignore patterns are always in dot:true mode. -function ignoreMap (pattern) { - var gmatcher = null - if (pattern.slice(-3) === '/**') { - var gpattern = pattern.replace(/(\/\*\*)+$/, '') - gmatcher = new Minimatch(gpattern, { dot: true }) - } +/***/ }), - return { - matcher: new Minimatch(pattern, { dot: true }), - gmatcher: gmatcher - } -} +/***/ 851: +/***/ (function(__unusedmodule, exports, __webpack_require__) { -function setopts (self, pattern, options) { - if (!options) - options = {} +"use strict"; - // base-matching: just use globstar for that. - if (options.matchBase && -1 === pattern.indexOf("/")) { - if (options.noglobstar) { - throw new Error("base matching requires globstar") +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpManager = void 0; +const utils_1 = __webpack_require__(128); +/** + * Used for managing http clients during either upload or download + */ +class HttpManager { + constructor(clientCount, userAgent) { + if (clientCount < 1) { + throw new Error('There must be at least one client'); + } + this.userAgent = userAgent; + this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); } - pattern = "**/" + pattern - } - - self.silent = !!options.silent - self.pattern = pattern - self.strict = options.strict !== false - self.realpath = !!options.realpath - self.realpathCache = options.realpathCache || Object.create(null) - self.follow = !!options.follow - self.dot = !!options.dot - self.mark = !!options.mark - self.nodir = !!options.nodir - if (self.nodir) - self.mark = true - self.sync = !!options.sync - self.nounique = !!options.nounique - self.nonull = !!options.nonull - self.nosort = !!options.nosort - self.nocase = !!options.nocase - self.stat = !!options.stat - self.noprocess = !!options.noprocess - self.absolute = !!options.absolute - - self.maxLength = options.maxLength || Infinity - self.cache = options.cache || Object.create(null) - self.statCache = options.statCache || Object.create(null) - self.symlinks = options.symlinks || Object.create(null) - - setupIgnores(self, options) - - self.changedCwd = false - var cwd = process.cwd() - if (!ownProp(options, "cwd")) - self.cwd = cwd - else { - self.cwd = path.resolve(options.cwd) - self.changedCwd = self.cwd !== cwd - } - - self.root = options.root || path.resolve(self.cwd, "/") - self.root = path.resolve(self.root) - if (process.platform === "win32") - self.root = 
self.root.replace(/\\/g, "/") - - // TODO: is an absolute `cwd` supposed to be resolved against `root`? - // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') - self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) - if (process.platform === "win32") - self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") - self.nomount = !!options.nomount - - // disable comments and negation in Minimatch. - // Note that they are not supported in Glob itself anyway. - options.nonegate = true - options.nocomment = true - - self.minimatch = new Minimatch(pattern, options) - self.options = self.minimatch.options -} - -function finish (self) { - var nou = self.nounique - var all = nou ? [] : Object.create(null) - - for (var i = 0, l = self.matches.length; i < l; i ++) { - var matches = self.matches[i] - if (!matches || Object.keys(matches).length === 0) { - if (self.nonull) { - // do like the shell, and spit out the literal glob - var literal = self.minimatch.globSet[i] - if (nou) - all.push(literal) - else - all[literal] = true - } - } else { - // had matches - var m = Object.keys(matches) - if (nou) - all.push.apply(all, m) - else - m.forEach(function (m) { - all[m] = true - }) + getClient(index) { + return this.clients[index]; } - } - - if (!nou) - all = Object.keys(all) - - if (!self.nosort) - all = all.sort(self.nocase ? alphasorti : alphasort) - - // at *some* point we statted all of these - if (self.mark) { - for (var i = 0; i < all.length; i++) { - all[i] = self._mark(all[i]) + // client disposal is necessary if a keep-alive connection is used to properly close the connection + // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 + disposeAndReplaceClient(index) { + this.clients[index].dispose(); + this.clients[index] = utils_1.createHttpClient(this.userAgent); } - if (self.nodir) { - all = all.filter(function (e) { - var notDir = !(/\/$/.test(e)) - var c = self.cache[e] || self.cache[makeAbs(self, e)] - if (notDir && c) - notDir = c !== 'DIR' && !Array.isArray(c) - return notDir - }) + disposeAndReplaceAllClients() { + for (const [index] of this.clients.entries()) { + this.disposeAndReplaceClient(index); + } } - } - - if (self.ignore.length) - all = all.filter(function(m) { - return !isIgnored(self, m) - }) - - self.found = all } +exports.HttpManager = HttpManager; +//# sourceMappingURL=http-manager.js.map -function mark (self, p) { - var abs = makeAbs(self, p) - var c = self.cache[abs] - var m = p - if (c) { - var isDir = c === 'DIR' || Array.isArray(c) - var slash = p.slice(-1) === '/' +/***/ }), - if (isDir && !slash) - m += '/' - else if (!isDir && slash) - m = m.slice(0, -1) +/***/ 857: +/***/ (function(module) { - if (m !== p) { - var mabs = makeAbs(self, m) - self.statCache[mabs] = self.statCache[abs] - self.cache[mabs] = self.cache[abs] +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); } - } - - return m -} - -// lotta situps... 
-function makeAbs (self, f) { - var abs = f - if (f.charAt(0) === '/') { - abs = path.join(self.root, f) - } else if (isAbsolute(f) || f === '') { - abs = f - } else if (self.changedCwd) { - abs = path.resolve(self.cwd, f) - } else { - abs = path.resolve(f) - } - - if (process.platform === 'win32') - abs = abs.replace(/\\/g, '/') + return res; +}; - return abs -} +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; -// Return true, if pattern ends with globstar '**', for the accompanying parent directory. -// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents -function isIgnored (self, path) { - if (!self.ignore.length) - return false +/***/ }), - return self.ignore.some(function(item) { - return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) - }) -} +/***/ 858: +/***/ (function(__unusedmodule, exports) { -function childrenIgnored (self, path) { - if (!self.ignore.length) - return false +"use strict"; - return self.ignore.some(function(item) { - return !!(item.gmatcher && item.gmatcher.match(path)) - }) -} +Object.defineProperty(exports, "__esModule", { value: true }); +var Inputs; +(function (Inputs) { + Inputs["Name"] = "name"; + Inputs["Path"] = "path"; + Inputs["IfNoFilesFound"] = "if-no-files-found"; + Inputs["RetentionDays"] = "retention-days"; +})(Inputs = exports.Inputs || (exports.Inputs = {})); +var NoFileOptions; +(function (NoFileOptions) { + /** + * Default. Output a warning but do not fail the action + */ + NoFileOptions["warn"] = "warn"; + /** + * Fail the action with an error message + */ + NoFileOptions["error"] = "error"; + /** + * Do not output any warnings or errors, the action does not fail + */ + NoFileOptions["ignore"] = "ignore"; +})(NoFileOptions = exports.NoFileOptions || (exports.NoFileOptions = {})); /***/ }), -/***/ 870: +/***/ 898: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; Object.defineProperty(exports, "__esModule", { value: true }); -exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0; -const core_1 = __webpack_require__(470); -const fs_1 = __webpack_require__(747); -const http_client_1 = __webpack_require__(539); -const auth_1 = __webpack_require__(226); -const config_variables_1 = __webpack_require__(401); -/** - * Returns a retry time in milliseconds that exponentially gets larger - * depending on the amount of retries that have been attempted - */ -function getExponentialRetryTimeInMilliseconds(retryCount) { - if (retryCount < 0) { - throw new Error('RetryCount should not be negative'); - } - else if (retryCount === 0) { - return config_variables_1.getInitialRetryIntervalInMilliseconds(); - } - const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount; - const maxTime = minTime * config_variables_1.getRetryMultiplier(); - // returns a random number between the minTime (inclusive) and the maxTime (exclusive) - return Math.trunc(Math.random() * (maxTime - minTime) + minTime); -} -exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds; -/** - * Parses a env variable that is a number - */ -function parseEnvNumber(key) { - const value = Number(process.env[key]); - if (Number.isNaN(value) || value < 0) { - return undefined; - } - return value; -} -exports.parseEnvNumber = parseEnvNumber; -/** - * Various utility functions to help with the necessary API calls - */ -function getApiVersion() { - return '6.0-preview'; -} -exports.getApiVersion = getApiVersion; -function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode >= 200 && statusCode < 300; -} -exports.isSuccessStatusCode = isSuccessStatusCode; -function isForbiddenStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode === http_client_1.HttpCodes.Forbidden; -} -exports.isForbiddenStatusCode = isForbiddenStatusCode; -function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; - } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.GatewayTimeout, - http_client_1.HttpCodes.InternalServerError, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.TooManyRequests, - 413 // Payload Too Large - ]; - return retryableStatusCodes.includes(statusCode); -} -exports.isRetryableStatusCode = isRetryableStatusCode; -function isThrottledStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode === http_client_1.HttpCodes.TooManyRequests; -} -exports.isThrottledStatusCode = isThrottledStatusCode; +const core = __webpack_require__(676); /** - * Attempts to get the retry-after value from a set of http headers. 
The retry time - * is originally denoted in seconds, so if present, it is converted to milliseconds - * @param headers all the headers received when making an http call + * Returns a copy with defaults filled in. */ -function tryGetRetryAfterValueTimeInMilliseconds(headers) { - if (headers['retry-after']) { - const retryTime = Number(headers['retry-after']); - if (!isNaN(retryTime)) { - core_1.info(`Retry-After header is present with a value of ${retryTime}`); - return retryTime * 1000; +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } - core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`); - return undefined; - } - core_1.info(`No retry-after header was found. Dumping all headers for diagnostic purposes`); - // eslint-disable-next-line no-console - console.log(headers); - return undefined; -} -exports.tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds; -function getContentRange(start, end, total) { - // Format: `bytes start-end/fileSize - // start and end are inclusive - // For a 200 byte chunk starting at byte 0: - // Content-Range: bytes 0-199/200 - return `bytes ${start}-${end}/${total}`; -} -exports.getContentRange = getContentRange; -/** - * Sets all the necessary headers when downloading an artifact - * @param {string} contentType the type of content being uploaded - * @param {boolean} isKeepAlive is the same connection being used to make multiple calls - * @param {boolean} acceptGzip can we accept a gzip encoded response - * @param {string} acceptType the type of content that we can accept - * @returns appropriate headers to make a specific http call during artifact download - */ -function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) { - const requestOptions = {}; - if (contentType) { - requestOptions['Content-Type'] = contentType; - } - if (isKeepAlive) { - requestOptions['Connection'] = 'Keep-Alive'; - // keep alive for at least 10 seconds before closing the connection - requestOptions['Keep-Alive'] = '10'; - } - if (acceptGzip) { - // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header - requestOptions['Accept-Encoding'] = 'gzip'; - requestOptions['Accept'] = `application/octet-stream;api-version=${getApiVersion()}`; - } - else { - // default to application/json if we are not working with gzip content - requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; } - return requestOptions; + return result; } -exports.getDownloadHeaders = getDownloadHeaders; -/** - * Sets all the necessary headers when uploading an artifact - * @param {string} contentType the type of content being uploaded - * @param {boolean} isKeepAlive is the same connection being used to make multiple calls - * @param {boolean} isGzip is the connection being used to upload GZip 
compressed content - * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed - * @param {number} contentLength the length of the content that is being uploaded - * @param {string} contentRange the range of the content that is being uploaded - * @returns appropriate headers to make a specific http call during artifact upload - */ -function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange) { - const requestOptions = {}; - requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; - if (contentType) { - requestOptions['Content-Type'] = contentType; - } - if (isKeepAlive) { - requestOptions['Connection'] = 'Keep-Alive'; - // keep alive for at least 10 seconds before closing the connection - requestOptions['Keep-Alive'] = '10'; - } - if (isGzip) { - requestOptions['Content-Encoding'] = 'gzip'; - requestOptions['x-tfs-filelength'] = uncompressedLength; - } - if (contentLength) { - requestOptions['Content-Length'] = contentLength; +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map + +/***/ }), + +/***/ 909: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = __webpack_require__(747) +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = __webpack_require__(313) + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) } - if (contentRange) { - requestOptions['Content-Range'] = contentRange; + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er } - return requestOptions; + } } -exports.getUploadHeaders = getUploadHeaders; -function createHttpClient(userAgent) { - return new http_client_1.HttpClient(userAgent, [ - new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken()) - ]); + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync } -exports.createHttpClient = createHttpClient; -function getArtifactUrl() { - const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`; - core_1.debug(`Artifact Url: ${artifactUrl}`); - return artifactUrl; + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync } -exports.getArtifactUrl = getArtifactUrl; -/** - * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information - * about the http call that was made by the actions http client. 
This information might be useful to display for diagnostic purposes, but - * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only - * the information that we want. - * - * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided. - * Other information such as the headers, the response code and message might be useful, so this is displayed. - */ -function displayHttpDiagnostics(response) { - core_1.info(`##### Begin Diagnostic HTTP information ##### -Status Code: ${response.message.statusCode} -Status Message: ${response.message.statusMessage} -Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} -###### End Diagnostic HTTP information ######`); + + +/***/ }), + +/***/ 913: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +var net = __webpack_require__(631); +var tls = __webpack_require__(16); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var events = __webpack_require__(614); +var assert = __webpack_require__(357); +var util = __webpack_require__(669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; } -exports.displayHttpDiagnostics = displayHttpDiagnostics; -function createDirectoriesForArtifact(directories) { - return __awaiter(this, void 0, void 0, function* () { - for (const directory of directories) { - yield fs_1.promises.mkdir(directory, { - recursive: true - }); - } - }); + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -exports.createDirectoriesForArtifact = createDirectoriesForArtifact; -function createEmptyFilesForArtifact(emptyFilesToCreate) { - return __awaiter(this, void 0, void 0, function* () { - for (const filePath of emptyFilesToCreate) { - yield (yield fs_1.promises.open(filePath, 'w')).close(); - } - }); + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; } -exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact; -function getFileSize(filePath) { - return __awaiter(this, void 0, void 0, function* () { - const stats = yield fs_1.promises.stat(filePath); - core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`); - return stats.size; - }); + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -exports.getFileSize = getFileSize; -function rmFile(filePath) { - return __awaiter(this, void 0, void 0, function* () { - yield fs_1.promises.unlink(filePath); - }); + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len 
= self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); } -exports.rmFile = rmFile; -function getProperRetention(retentionInput, retentionSetting) { - if (retentionInput < 0) { - throw new Error('Invalid retention, minimum value is 1.'); +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. + self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); } - let retention = retentionInput; - if (retentionSetting) { - const maxRetention = parseInt(retentionSetting); - if (!isNaN(maxRetention) && maxRetention < retention) { - core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`); - retention = maxRetention; - } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); } - return retention; -} -exports.getProperRetention = getProperRetention; -function sleep(milliseconds) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise(resolve => setTimeout(resolve, milliseconds)); - }); -} -exports.sleep = sleep; -//# sourceMappingURL=utils.js.map + }); +}; -/***/ }), +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); -/***/ 875: -/***/ (function(module, __unusedexports, __webpack_require__) { + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } -"use strict"; + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. 
+ process.nextTick(function() { + onConnect(res, socket, head); + }); + } + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); -const { promisify } = __webpack_require__(669); -const tmp = __webpack_require__(150); + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } -// file -module.exports.fileSync = tmp.fileSync; -const fileWithOptions = promisify((options, cb) => - tmp.file(options, (err, path, fd, cleanup) => - err ? cb(err) : cb(undefined, { path, fd, cleanup: promisify(cleanup) }) - ) -); -module.exports.file = async (options) => fileWithOptions(options); + function onError(cause) { + connectReq.removeAllListeners(); -module.exports.withFile = async function withFile(fn, options) { - const { path, fd, cleanup } = await module.exports.file(options); - try { - return await fn({ path, fd }); - } finally { - await cleanup(); + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); } }; +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); -// directory -module.exports.dirSync = tmp.dirSync; -const dirWithOptions = promisify((options, cb) => - tmp.dir(options, (err, path, cleanup) => - err ? cb(err) : cb(undefined, { path, cleanup: promisify(cleanup) }) - ) -); -module.exports.dir = async (options) => dirWithOptions(options); - -module.exports.withDir = async function withDir(fn, options) { - const { path, cleanup } = await module.exports.dir(options); - try { - return await fn({ path }); - } finally { - await cleanup(); + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); } }; +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host + }); -// name generation -module.exports.tmpNameSync = tmp.tmpNameSync; -module.exports.tmpName = promisify(tmp.tmpName); - -module.exports.tmpdir = tmp.tmpdir; + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} -module.exports.setGracefulCleanup = tmp.setGracefulCleanup; +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} -/***/ }), +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} -/***/ 896: -/***/ (function(module) { -module.exports = function (xs, fn) { - var res = []; - for (var i = 0; i < xs.length; i++) { - var x = fn(xs[i], i); - if (isArray(x)) res.push.apply(res, x); - else res.push(x); +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); } - return res; -}; - -var isArray = Array.isArray || function (xs) { - return Object.prototype.toString.call(xs) === '[object Array]'; -}; + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test /***/ }), -/***/ 923: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; +/***/ 917: +/***/ (function(module) { -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = __webpack_require__(357); -const os = __webpack_require__(87); -const path = __webpack_require__(622); -const pathHelper = __webpack_require__(972); -const minimatch_1 = __webpack_require__(93); -const internal_match_kind_1 = __webpack_require__(327); -const internal_path_1 = __webpack_require__(383); -const IS_WINDOWS = process.platform === 'win32'; -class Pattern { - constructor(patternOrNegate, segments) { - /** - * Indicates whether matches should be excluded from the result set - */ - this.negate = false; - // Pattern overload - let pattern; - if (typeof patternOrNegate === 'string') { - pattern = patternOrNegate.trim(); - } - // Segments overload - else { - // Convert to pattern - segments = segments || []; - assert(segments.length, `Parameter 'segments' must not empty`); - const root = Pattern.getLiteral(segments[0]); - assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); - pattern = new internal_path_1.Path(segments).toString().trim(); - if (patternOrNegate) { - pattern = `!${pattern}`; - } - } - // Negate - while (pattern.startsWith('!')) { - this.negate = !this.negate; - pattern = pattern.substr(1).trim(); - } - // Normalize slashes and ensures absolute root - pattern = Pattern.fixupPattern(pattern); - // Segments - this.segments = new internal_path_1.Path(pattern).segments; - // Trailing slash indicates the pattern should only match directories, not regular files - this.trailingSeparator = pathHelper - .normalizeSeparators(pattern) - .endsWith(path.sep); 
- pattern = pathHelper.safeTrimTrailingSeparator(pattern); - // Search path (literal path prior to the first glob segment) - let foundGlob = false; - const searchSegments = this.segments - .map(x => Pattern.getLiteral(x)) - .filter(x => !foundGlob && !(foundGlob = x === '')); - this.searchPath = new internal_path_1.Path(searchSegments).toString(); - // Root RegExp (required when determining partial match) - this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); - // Create minimatch - const minimatchOptions = { - dot: true, - nobrace: true, - nocase: IS_WINDOWS, - nocomment: true, - noext: true, - nonegate: true - }; - pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; - this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); - } - /** - * Matches the pattern against the specified path - */ - match(itemPath) { - // Last segment is globstar? - if (this.segments[this.segments.length - 1] === '**') { - // Normalize slashes - itemPath = pathHelper.normalizeSeparators(itemPath); - // Append a trailing slash. Otherwise Minimatch will not match the directory immediately - // preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns - // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. - if (!itemPath.endsWith(path.sep)) { - // Note, this is safe because the constructor ensures the pattern has an absolute root. - // For example, formats like C: and C:foo on Windows are resolved to an aboslute root. - itemPath = `${itemPath}${path.sep}`; - } - } - else { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - } - // Match - if (this.minimatch.match(itemPath)) { - return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true } - return internal_match_kind_1.MatchKind.None; + }) } - /** - * Indicates whether the pattern may match descendants of the specified path - */ - partialMatch(itemPath) { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // matchOne does not handle root path correctly - if (pathHelper.dirname(itemPath) === itemPath) { - return this.rootRegExp.test(itemPath); - } - return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor } - /** - * Escapes glob patterns within a path - */ - static globEscape(s) { - return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS - .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment - .replace(/\?/g, '[?]') // escape '?' 
- .replace(/\*/g, '[*]'); // escape '*' + } +} + + +/***/ }), + +/***/ 919: +/***/ (function(module, __unusedexports, __webpack_require__) { + +try { + var util = __webpack_require__(669); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __webpack_require__(917); +} + + +/***/ }), + +/***/ 923: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const glob = __importStar(__webpack_require__(166)); +const path = __importStar(__webpack_require__(622)); +const core_1 = __webpack_require__(676); +const fs_1 = __webpack_require__(747); +const path_1 = __webpack_require__(622); +const util_1 = __webpack_require__(669); +const stats = util_1.promisify(fs_1.stat); +function getDefaultGlobOptions() { + return { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; +} +/** + * If multiple paths are specific, the least common ancestor (LCA) of the search paths is used as + * the delimiter to control the directory structure for the artifact. This function returns the LCA + * when given an array of search paths + * + * Example 1: The patterns `/foo/` and `/bar/` returns `/` + * + * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo` + */ +function getMultiPathLCA(searchPaths) { + if (searchPaths.length < 2) { + throw new Error('At least two search paths must be provided'); } - /** - * Normalizes slashes and ensures absolute root - */ - static fixupPattern(pattern) { - // Empty - assert(pattern, 'pattern cannot be empty'); - // Must not contain `.` segment, unless first segment - // Must not contain `..` segment - const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); - assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); - // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r - assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); - // Normalize slashes - pattern = pathHelper.normalizeSeparators(pattern); - // Replace leading `.` segment - if (pattern === '.' 
|| pattern.startsWith(`.${path.sep}`)) { - pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); - } - // Replace leading `~` segment - else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { - const homedir = os.homedir(); - assert(homedir, 'Unable to determine HOME directory'); - assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); - pattern = Pattern.globEscape(homedir) + pattern.substr(1); - } - // Replace relative drive root, e.g. pattern is C: or C:foo - else if (IS_WINDOWS && - (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); - if (pattern.length > 2 && !root.endsWith('\\')) { - root += '\\'; - } - pattern = Pattern.globEscape(root) + pattern.substr(2); - } - // Replace relative root, e.g. pattern is \ or \foo - else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); - if (!root.endsWith('\\')) { - root += '\\'; + const commonPaths = new Array(); + const splitPaths = new Array(); + let smallestPathLength = Number.MAX_SAFE_INTEGER; + // split each of the search paths using the platform specific separator + for (const searchPath of searchPaths) { + core_1.debug(`Using search path ${searchPath}`); + const splitSearchPath = path.normalize(searchPath).split(path.sep); + // keep track of the smallest path length so that we don't accidentally later go out of bounds + smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length); + splitPaths.push(splitSearchPath); + } + // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it + if (searchPaths[0].startsWith(path.sep)) { + commonPaths.push(path.sep); + } + let splitIndex = 0; + // function to check if the paths are the same at a specific index + function isPathTheSame() { + const compare = splitPaths[0][splitIndex]; + for (let i = 1; i < splitPaths.length; i++) { + if (compare !== splitPaths[i][splitIndex]) { + // a non-common index has been reached + return false; } - pattern = Pattern.globEscape(root) + pattern.substr(1); } - // Otherwise ensure absolute root - else { - pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + return true; + } + // loop over all the search paths until there is a non-common ancestor or we go out of bounds + while (splitIndex < smallestPathLength) { + if (!isPathTheSame()) { + break; } - return pathHelper.normalizeSeparators(pattern); + // if all are the same, add to the end result & increment the index + commonPaths.push(splitPaths[0][splitIndex]); + splitIndex++; } - /** - * Attempts to unescape a pattern segment to create a literal path segment. - * Otherwise returns empty string. 
- */ - static getLiteral(segment) { - let literal = ''; - for (let i = 0; i < segment.length; i++) { - const c = segment[i]; - // Escape - if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { - literal += segment[++i]; - continue; - } - // Wildcard - else if (c === '*' || c === '?') { - return ''; - } - // Character set - else if (c === '[' && i + 1 < segment.length) { - let set = ''; - let closed = -1; - for (let i2 = i + 1; i2 < segment.length; i2++) { - const c2 = segment[i2]; - // Escape - if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { - set += segment[++i2]; - continue; - } - // Closed - else if (c2 === ']') { - closed = i2; - break; - } - // Otherwise - else { - set += c2; - } + return path.join(...commonPaths); +} +function findFilesToUpload(searchPath, globOptions) { + return __awaiter(this, void 0, void 0, function* () { + const searchResults = []; + const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions()); + const rawSearchResults = yield globber.glob(); + /* + Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten + Detect any files that could be overwritten for user awareness + */ + const set = new Set(); + /* + Directories will be rejected if attempted to be uploaded. This includes just empty + directories so filter any directories out from the raw search results + */ + for (const searchResult of rawSearchResults) { + const fileStats = yield stats(searchResult); + // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead + if (!fileStats.isDirectory()) { + core_1.debug(`File:${searchResult} was found using the provided searchPath`); + searchResults.push(searchResult); + // detect any files that would be overwritten because of case insensitivity + if (set.has(searchResult.toLowerCase())) { + core_1.info(`Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`); } - // Closed? - if (closed >= 0) { - // Cannot convert - if (set.length > 1) { - return ''; - } - // Convert to literal - if (set) { - literal += set; - i = closed; - continue; - } + else { + set.add(searchResult.toLowerCase()); } - // Otherwise fall thru } - // Append - literal += c; + else { + core_1.debug(`Removing ${searchResult} from rawSearchResults because it is a directory`); + } } - return literal; - } - /** - * Escapes regexp special characters - * https://javascript.info/regexp-escaping - */ - static regExpEscape(s) { - return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); - } + // Calculate the root directory for the artifact using the search paths that were utilized + const searchPaths = globber.getSearchPaths(); + if (searchPaths.length > 1) { + core_1.info(`Multiple search paths detected. Calculating the least common ancestor of all paths`); + const lcaSearchPath = getMultiPathLCA(searchPaths); + core_1.info(`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`); + return { + filesToUpload: searchResults, + rootDirectory: lcaSearchPath + }; + } + /* + Special case for a single file artifact that is uploaded without a directory or wildcard pattern. 
The directory structure is + not preserved and the root directory will be the single files parent directory + */ + if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) { + return { + filesToUpload: searchResults, + rootDirectory: path_1.dirname(searchResults[0]) + }; + } + return { + filesToUpload: searchResults, + rootDirectory: searchPaths[0] + }; + }); } -exports.Pattern = Pattern; -//# sourceMappingURL=internal-pattern.js.map +exports.findFilesToUpload = findFilesToUpload; + /***/ }), -/***/ 950: -/***/ (function(__unusedmodule, exports) { +/***/ 936: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; Object.defineProperty(exports, "__esModule", { value: true }); -function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; - if (checkBypass(reqUrl)) { - return proxyUrl; - } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = new URL(proxyVar); - } - return proxyUrl; +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(615); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); } -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; } - // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } } + cmdStr += 
`${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; } - return false; } -exports.checkBypass = checkBypass; - +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map /***/ }), -/***/ 972: +/***/ 941: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; Object.defineProperty(exports, "__esModule", { value: true }); -const assert = __webpack_require__(357); +const core = __webpack_require__(676); +const fs = __webpack_require__(747); +const globOptionsHelper = __webpack_require__(898); const path = __webpack_require__(622); +const patternHelper = __webpack_require__(365); +const internal_match_kind_1 = __webpack_require__(432); +const internal_pattern_1 = __webpack_require__(609); +const internal_search_state_1 = __webpack_require__(242); const IS_WINDOWS = process.platform === 'win32'; -/** - * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. - * - * For example, on Linux/macOS: - * - `/ => /` - * - `/hello => /` - * - * For example, on Windows: - * - `C:\ => C:\` - * - `C:\hello => C:\` - * - `C: => C:` - * - `C:hello => C:` - * - `\ => \` - * - `\hello => \` - * - `\\hello => \\hello` - * - `\\hello\world => \\hello\world` - */ -function dirname(p) { - // Normalize slashes and trim unnecessary trailing slash - p = safeTrimTrailingSeparator(p); - // Windows UNC root, e.g. \\hello or \\hello\world - if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { - return p; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); } - // Get dirname - let result = path.dirname(p); - // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ - if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { - result = safeTrimTrailingSeparator(result); + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); } - return result; -} -exports.dirname = dirname; -/** - * Roots the path if not already rooted. On Windows, relative roots like `\` - * or `C:` are expanded based on the current working directory. - */ -function ensureAbsoluteRoot(root, itemPath) { - assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); - assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); - // Already rooted - if (hasAbsoluteRoot(itemPath)) { - return itemPath; + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? 
+ const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); } - // Windows - if (IS_WINDOWS) { - // Check for itemPath like C: or C:foo - if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { - let cwd = process.cwd(); - assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - // Drive letter matches cwd? Expand to cwd - if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { - // Drive only, e.g. C: - if (itemPath.length === 2) { - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}`; + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; } - // Drive + path, e.g. 
C:foo + // Pattern else { - if (!cwd.endsWith('\\')) { - cwd += '\\'; + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); } - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; + throw err; } } - // Different drive else { - return `${itemPath[0]}:\\${itemPath.substr(2)}`; + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); } - } - // Check for itemPath like \ or \foo - else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { - const cwd = process.cwd(); - assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - return `${cwd[0]}:\\${itemPath.substr(1)}`; - } - } - assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); - // Otherwise ensure root ends with a separator - if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { - // Intentionally empty - } - else { - // Append separator - root += path.sep; - } - return root + itemPath; -} -exports.ensureAbsoluteRoot = ensureAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\\hello\share` and `C:\hello` (and using alternate separator). - */ -function hasAbsoluteRoot(itemPath) { - assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \\hello\share or C:\hello - return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); - } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasAbsoluteRoot = hasAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). - */ -function hasRoot(itemPath) { - assert(itemPath, `isRooted parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \ or \hello or \\hello - // E.g. C: or C:\hello - return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); - } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasRoot = hasRoot; -/** - * Removes redundant slashes and converts `/` to `\` on Windows - */ -function normalizeSeparators(p) { - p = p || ''; - // Windows - if (IS_WINDOWS) { - // Convert slashes on Windows - p = p.replace(/\//g, '\\'); - // Remove redundant slashes - const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello - return (isUnc ? 
'\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC - } - // Remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -exports.normalizeSeparators = normalizeSeparators; -/** - * Normalizes the path separators and trims the trailing separator (when safe). - * For example, `/foo/ => /foo` but `/ => /` - */ -function safeTrimTrailingSeparator(p) { - // Short-circuit if empty - if (!p) { - return ''; - } - // Normalize separators - p = normalizeSeparators(p); - // No trailing slash - if (!p.endsWith(path.sep)) { - return p; - } - // Check '/' on Linux/macOS and '\' on Windows - if (p === path.sep) { - return p; - } - // On Windows check if drive root. E.g. C:\ - if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { - return p; + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); } - // Otherwise trim trailing slash - return p.substr(0, p.length - 1); } -exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; -//# sourceMappingURL=internal-path-helper.js.map - -/***/ }), - -/***/ 988: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = __webpack_require__(141); - +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map /***/ }) diff --git a/package-lock.json b/package-lock.json index 0e354722..0602d607 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "3.0.0", "license": "MIT", "dependencies": { - "@actions/artifact": "^1.0.0", + "@actions/artifact": "^1.1.0", "@actions/core": "^1.2.6", "@actions/glob": "^0.1.0", "@actions/io": "^1.0.2" @@ -32,12 +32,12 @@ } }, "node_modules/@actions/artifact": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.0.0.tgz", - "integrity": "sha512-oje+cCiM2maVwoiN+LT9kh2C6UqiTcS1tDKins+nRfckX+C8JJD2kAmzpD5fn/p5Dibjrqk1mtwreAzgNxHrDg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.1.0.tgz", + "integrity": "sha512-shO+w/BAnzRnFhfsgUao8sxjByAMqDdfvek2LLKeCueBKXoTrAcp7U/hs9Fdx+z9g7Q0mbIrmHAzAAww4HK1bQ==", "dependencies": { "@actions/core": "^1.2.6", - "@actions/http-client": "^1.0.11", + "@actions/http-client": "^2.0.1", "tmp": "^0.2.1", "tmp-promise": "^3.0.2" } @@ -57,11 +57,11 @@ } }, "node_modules/@actions/http-client": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz", - "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", "dependencies": { - "tunnel": "0.0.6" + "tunnel": "^0.0.6" } }, "node_modules/@actions/io": { @@ -6789,7 +6789,10 @@ "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - 
"integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } }, "node_modules/type-check": { "version": "0.3.2", @@ -7067,12 +7070,12 @@ }, "dependencies": { "@actions/artifact": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.0.0.tgz", - "integrity": "sha512-oje+cCiM2maVwoiN+LT9kh2C6UqiTcS1tDKins+nRfckX+C8JJD2kAmzpD5fn/p5Dibjrqk1mtwreAzgNxHrDg==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.1.0.tgz", + "integrity": "sha512-shO+w/BAnzRnFhfsgUao8sxjByAMqDdfvek2LLKeCueBKXoTrAcp7U/hs9Fdx+z9g7Q0mbIrmHAzAAww4HK1bQ==", "requires": { "@actions/core": "^1.2.6", - "@actions/http-client": "^1.0.11", + "@actions/http-client": "^2.0.1", "tmp": "^0.2.1", "tmp-promise": "^3.0.2" } @@ -7092,11 +7095,11 @@ } }, "@actions/http-client": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-1.0.11.tgz", - "integrity": "sha512-VRYHGQV1rqnROJqdMvGUbY/Kn8vriQe/F9HR2AlYHzmKuM/p3kjNuXhmdBfcVgsvRWTz5C5XW5xvndZrVBuAYg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.0.1.tgz", + "integrity": "sha512-PIXiMVtz6VvyaRsGY268qvj57hXQEpsYogYOu2nrQhlf+XCGmZstmuZBbAybUl1nQGnvS1k1eEsQ69ZoD7xlSw==", "requires": { - "tunnel": "0.0.6" + "tunnel": "^0.0.6" } }, "@actions/io": { diff --git a/package.json b/package.json index c6f6f907..c7854f66 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,7 @@ }, "homepage": "https://github.com/actions/upload-artifact#readme", "dependencies": { - "@actions/artifact": "^1.0.0", + "@actions/artifact": "^1.1.0", "@actions/core": "^1.2.6", "@actions/glob": "^0.1.0", "@actions/io": "^1.0.2" From 4d3986961d0423ba9a593efb490a2960eb65f43b Mon Sep 17 00:00:00 2001 From: Rob Herley Date: Thu, 19 May 2022 20:46:08 +0000 Subject: [PATCH 2/3] recompile with correct ncc version --- dist/index.js | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/dist/index.js b/dist/index.js index fda863fb..e83304cf 100644 --- a/dist/index.js +++ b/dist/index.js @@ -19,13 +19,7 @@ module.exports = /******/ }; /******/ /******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete installedModules[moduleId]; -/******/ } +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ /******/ // Flag the module as loaded /******/ module.l = true; From 849aa7758a428ee22be38de371b49c8bd57c4b9d Mon Sep 17 00:00:00 2001 From: Rob Herley Date: Thu, 19 May 2022 21:17:44 +0000 Subject: [PATCH 3/3] nvm use 12 & npm run release --- dist/index.js | 15202 ++++++++++++++++++++++++------------------------ 1 file changed, 7601 insertions(+), 7601 deletions(-) diff --git a/dist/index.js b/dist/index.js index e83304cf..2be9f506 100644 --- a/dist/index.js +++ b/dist/index.js @@ -34,7 +34,7 @@ module.exports = /******/ // the startup function /******/ function startup() { /******/ // Load entry module and return exports -/******/ return __webpack_require__(799); +/******/ return __webpack_require__(385); /******/ }; /******/ /******/ // run startup @@ -43,4264 +43,1472 
@@ module.exports = /************************************************************************/ /******/ ({ -/***/ 16: +/***/ 11: /***/ (function(module) { -module.exports = require("tls"); +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. +module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) -/***/ }), + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') -/***/ 20: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) -"use strict"; + return wrapper -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DownloadHttpClient = void 0; -const fs = __importStar(__webpack_require__(747)); -const core = __importStar(__webpack_require__(676)); -const zlib = __importStar(__webpack_require__(761)); -const utils_1 = __webpack_require__(128); -const url_1 = __webpack_require__(835); -const status_reporter_1 = __webpack_require__(722); -const perf_hooks_1 = __webpack_require__(630); -const http_manager_1 = __webpack_require__(851); -const config_variables_1 = __webpack_require__(750); -const requestUtils_1 = __webpack_require__(682); -class DownloadHttpClient { - constructor() { - this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download'); - // downloads are usually significantly faster than uploads so display status information every second - this.statusReporter = new status_reporter_1.StatusReporter(1000); - } - /** - * Gets a list of all artifacts that are in a specific container - */ - listArtifacts() { - return __awaiter(this, void 0, void 0, function* () { - const artifactUrl = utils_1.getArtifactUrl(); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.downloadHttpManager.getClient(0); - const headers = utils_1.getDownloadHeaders('application/json'); - const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); })); - const body = yield response.readBody(); - return JSON.parse(body); - }); - } - /** - * Fetches a set of container items that describe the contents of an artifact - * @param artifactName the name of the artifact - * @param containerUrl the artifact container URL for the run - */ - getContainerItems(artifactName, containerUrl) { - return __awaiter(this, void 0, void 0, function* () { - // the itemPath search parameter controls which containers will be returned - const resourceUrl = new url_1.URL(containerUrl); - resourceUrl.searchParams.append('itemPath', artifactName); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.downloadHttpManager.getClient(0); - const headers = utils_1.getDownloadHeaders('application/json'); - const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); })); - const body = yield response.readBody(); - return JSON.parse(body); - }); - } - /** - * Concurrently downloads all the files that are part of an artifact - * @param downloadItems information about what items to download and where to save them - */ - downloadSingleArtifact(downloadItems) { - return __awaiter(this, void 0, void 0, function* () { - const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency(); - // limit the number of files downloaded at a single time - core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); - const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; - let currentFile = 0; - let downloadedFiles = 0; - core.info(`Total number of files that will be downloaded: ${downloadItems.length}`); - 
this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); - this.statusReporter.start(); - yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () { - while (currentFile < downloadItems.length) { - const currentFileToDownload = downloadItems[currentFile]; - currentFile += 1; - const startTime = perf_hooks_1.performance.now(); - yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); - if (core.isDebug()) { - core.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); - } - this.statusReporter.incrementProcessedCount(); - } - }))) - .catch(error => { - throw new Error(`Unable to download the artifact: ${error}`); - }) - .finally(() => { - this.statusReporter.stop(); - // safety dispose all connections - this.downloadHttpManager.disposeAndReplaceAllClients(); - }); - }); + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] } - /** - * Downloads an individual file - * @param httpClientIndex the index of the http client that is used to make all of the calls - * @param artifactLocation origin location where a file will be downloaded from - * @param downloadPath destination location for the file being downloaded - */ - downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) { - return __awaiter(this, void 0, void 0, function* () { - let retryCount = 0; - const retryLimit = config_variables_1.getRetryLimit(); - let destinationStream = fs.createWriteStream(downloadPath); - const headers = utils_1.getDownloadHeaders('application/json', true, true); - // a single GET request is used to download a file - const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () { - const client = this.downloadHttpManager.getClient(httpClientIndex); - return yield client.get(artifactLocation, headers); - }); - // check the response headers to determine if the file was compressed using gzip - const isGzip = (incomingHeaders) => { - return ('content-encoding' in incomingHeaders && - incomingHeaders['content-encoding'] === 'gzip'); - }; - // Increments the current retry count and then checks if the retry limit has been reached - // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided, - // it will be used - const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { - retryCount++; - if (retryCount > retryLimit) { - return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`)); - } - else { - this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); - if (retryAfterValue) { - // Back off by waiting the specified time denoted by the retry-after header - core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); - yield utils_1.sleep(retryAfterValue); - } - else { - // Back off using an exponential value that depends on the retry count - const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); - core.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the download`); - yield utils_1.sleep(backoffTime); - } - core.info(`Finished backoff for retry #${retryCount}, continuing with download`); - } - }); - const isAllBytesReceived = (expected, received) => { - // be lenient, if any input is missing, assume success, i.e. not truncated - if (!expected || - !received || - process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) { - core.info('Skipping download validation.'); - return true; - } - return parseInt(expected) === received; - }; - const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () { - destinationStream.close(); - yield utils_1.rmFile(fileDownloadPath); - destinationStream = fs.createWriteStream(fileDownloadPath); - }); - // keep trying to download a file until a retry limit has been reached - while (retryCount <= retryLimit) { - let response; - try { - response = yield makeDownloadRequest(); - } - catch (error) { - // if an error is caught, it is usually indicative of a timeout so retry the download - core.info('An error occurred while attempting to download a file'); - // eslint-disable-next-line no-console - console.log(error); - // increment the retryCount and use exponential backoff to wait before making the next request - yield backOff(); - continue; - } - let forceRetry = false; - if (utils_1.isSuccessStatusCode(response.message.statusCode)) { - // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string - // which can cause some gzip encoded data to be lost - // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents - try { - const isGzipped = isGzip(response.message.headers); - yield this.pipeResponseToFile(response, destinationStream, isGzipped); - if (isGzipped || - isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) { - return; - } - else { - forceRetry = true; - } - } - catch (error) { - // retry on error, most likely streams were corrupted - forceRetry = true; - } - } - if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) { - core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); - resetDestinationStream(downloadPath); - // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff - utils_1.isThrottledStatusCode(response.message.statusCode) - ? 
yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) - : yield backOff(); - } - else { - // Some unexpected response code, fail immediately and stop the download - utils_1.displayHttpDiagnostics(response); - return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`)); - } - } - }); - } - /** - * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary - * @param response the http response received when downloading a file - * @param destinationStream the stream where the file should be written to - * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it - */ - pipeResponseToFile(response, destinationStream, isGzip) { - return __awaiter(this, void 0, void 0, function* () { - yield new Promise((resolve, reject) => { - if (isGzip) { - const gunzip = zlib.createGunzip(); - response.message - .on('error', error => { - core.error(`An error occurred while attempting to read the response stream`); - gunzip.close(); - destinationStream.close(); - reject(error); - }) - .pipe(gunzip) - .on('error', error => { - core.error(`An error occurred while attempting to decompress the response stream`); - destinationStream.close(); - reject(error); - }) - .pipe(destinationStream) - .on('close', () => { - resolve(); - }) - .on('error', error => { - core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); - reject(error); - }); - } - else { - response.message - .on('error', error => { - core.error(`An error occurred while attempting to read the response stream`); - destinationStream.close(); - reject(error); - }) - .pipe(destinationStream) - .on('close', () => { - resolve(); - }) - .on('error', error => { - core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); - reject(error); - }); - } - }); - return; - }); + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) } + return ret + } } -exports.DownloadHttpClient = DownloadHttpClient; -//# sourceMappingURL=download-http-client.js.map + /***/ }), -/***/ 26: +/***/ 13: /***/ (function(module, __unusedexports, __webpack_require__) { -/*! - * Tmp - * - * Copyright (c) 2011-2017 KARASZI Istvan - * - * MIT Licensed - */ - -/* - * Module dependencies. - */ -const fs = __webpack_require__(747); -const os = __webpack_require__(87); -const path = __webpack_require__(622); -const crypto = __webpack_require__(417); -const _c = { fs: fs.constants, os: os.constants }; -const rimraf = __webpack_require__(254); +const assert = __webpack_require__(357) +const path = __webpack_require__(622) +const fs = __webpack_require__(747) +let glob = undefined +try { + glob = __webpack_require__(402) +} catch (_err) { + // treat glob as optional. +} -/* - * The working inner variables. 
- */ -const - // the random characters to choose from - RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', +const defaultGlobOpts = { + nosort: true, + silent: true +} - TEMPLATE_PATTERN = /XXXXXX/, +// for EMFILE handling +let timeout = 0 - DEFAULT_TRIES = 3, +const isWindows = (process.platform === "win32") - CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), +const defaults = options => { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) - // constants are off on the windows platform and will not match the actual errno codes - IS_WIN32 = os.platform() === 'win32', - EBADF = _c.EBADF || _c.os.errno.EBADF, - ENOENT = _c.ENOENT || _c.os.errno.ENOENT, + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} - DIR_MODE = 0o700 /* 448 */, - FILE_MODE = 0o600 /* 384 */, +const rimraf = (p, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} + } - EXIT = 'exit', + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') - // this will hold the objects need to be removed on exit - _removeObjects = [], + defaults(options) - // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback - FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), - FN_RIMRAF_SYNC = rimraf.sync; + let busyTries = 0 + let errState = null + let n = 0 -let - _gracefulCleanup = false; + const next = (er) => { + errState = errState || er + if (--n === 0) + cb(errState) + } -/** - * Gets a temporary file name. - * - * @param {(Options|tmpNameCallback)} options options or callback - * @param {?tmpNameCallback} callback the callback function - */ -function tmpName(options, callback) { - const - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; + const afterGlob = (er, results) => { + if (er) + return cb(er) - try { - _assertAndSanitizeOptions(opts); - } catch (err) { - return cb(err); - } + n = results.length + if (n === 0) + return cb() - let tries = opts.tries; - (function _getUniqueName() { - try { - const name = _generateTmpName(opts); + results.forEach(p => { + const CB = (er) => { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) + } - // check whether the path exists then retry if needed - fs.stat(name, function (err) { - /* istanbul ignore else */ - if (!err) { - /* istanbul ignore else */ - if (tries-- > 0) return _getUniqueName(); + // this one won't happen if graceful-fs is used. 
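// The busy-retry handling above re-runs the whole removal when it hits transient
// errors (EBUSY, ENOTEMPTY, EPERM), waiting busyTries * 100 ms longer on every
// attempt. A minimal standalone sketch of that pattern using Node's promise-based
// fs API; removeWithRetry and its defaults are illustrative names, not part of rimraf.
const fsp = require('fs').promises;

async function removeWithRetry(target, maxBusyTries = 3) {
  for (let attempt = 0; ; attempt++) {
    try {
      // fs.rm with recursive/force is the modern built-in equivalent (Node 14.14+)
      return await fsp.rm(target, { recursive: true, force: true });
    } catch (er) {
      const busy = er && ['EBUSY', 'ENOTEMPTY', 'EPERM'].includes(er.code);
      if (!busy || attempt >= maxBusyTries) throw er;
      // wait a little longer on each retry, mirroring the busyTries * 100 backoff above
      await new Promise(resolve => setTimeout(resolve, (attempt + 1) * 100));
    }
  }
}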
+ if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(() => rimraf_(p, options, CB), timeout ++) + } - return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); + // already gone + if (er.code === "ENOENT") er = null } - cb(null, name); - }); - } catch (err) { - cb(err); - } - }()); -} + timeout = 0 + next(er) + } + rimraf_(p, options, CB) + }) + } -/** - * Synchronous version of tmpName. - * - * @param {Object} options - * @returns {string} the generated random name - * @throws {Error} if the options are invalid or could not generate a filename - */ -function tmpNameSync(options) { - const - args = _parseArguments(options), - opts = args[0]; + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) - _assertAndSanitizeOptions(opts); + options.lstat(p, (er, stat) => { + if (!er) + return afterGlob(null, [p]) - let tries = opts.tries; - do { - const name = _generateTmpName(opts); - try { - fs.statSync(name); - } catch (e) { - return name; - } - } while (tries-- > 0); + glob(p, options.glob, afterGlob) + }) - throw new Error('Could not get a unique tmp filename, max tries reached'); } -/** - * Creates and opens a temporary file. - * - * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined - * @param {?fileCallback} callback - */ -function file(options, callback) { - const - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; - - // gets a temporary filename - tmpName(opts, function _tmpNameCreated(err, name) { - /* istanbul ignore else */ - if (err) return cb(err); - - // create and open the file - fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { - /* istanbu ignore else */ - if (err) return cb(err); +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +const rimraf_ = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') - if (opts.discardDescriptor) { - return fs.close(fd, function _discardCallback(possibleErr) { - // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only - return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); - }); - } else { - // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care - // about the descriptor - const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; - cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false)); + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. 
+ if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, er => { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) } - }); - }); + return cb(er) + }) + }) } -/** - * Synchronous version of file. - * - * @param {Options} options - * @returns {FileSyncObject} object consists of name, fd and removeCallback - * @throws {Error} if cannot create a file - */ -function fileSync(options) { - const - args = _parseArguments(options), - opts = args[0]; - - const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; - const name = tmpNameSync(opts); - var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); - /* istanbul ignore else */ - if (opts.discardDescriptor) { - fs.closeSync(fd); - fd = undefined; - } +const fixWinEPERM = (p, options, er, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') - return { - name: name, - fd: fd, - removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) - }; + options.chmod(p, 0o666, er2 => { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, (er3, stats) => { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) } -/** - * Creates a temporary directory. - * - * @param {(Options|dirCallback)} options the options or the callback function - * @param {?dirCallback} callback - */ -function dir(options, callback) { - const - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; +const fixWinEPERMSync = (p, options, er) => { + assert(p) + assert(options) - // gets a temporary filename - tmpName(opts, function _tmpNameCreated(err, name) { - /* istanbul ignore else */ - if (err) return cb(err); + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } - // create the directory - fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { - /* istanbul ignore else */ - if (err) return cb(err); + let stats + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } - cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); - }); - }); + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) } -/** - * Synchronous version of dir. - * - * @param {Options} options - * @returns {DirSyncObject} object consists of name and removeCallback - * @throws {Error} if it cannot create a directory - */ -function dirSync(options) { - const - args = _parseArguments(options), - opts = args[0]; - - const name = tmpNameSync(opts); - fs.mkdirSync(name, opts.mode || DIR_MODE); +const rmdir = (p, options, originalEr, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') - return { - name: name, - removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) - }; + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
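// Strategy 1 from the comment above ("assume it's a file, unlink it, then do the
// dir stuff on EPERM or EISDIR") can be sketched with promises in a few lines.
// This only illustrates the idea, not the rimraf implementation itself: removeEntry
// is a made-up name, and the recursive fs.rm stands in for rimraf's own
// rmdir-plus-children handling.
const { promises: fsPromises } = require('fs');

async function removeEntry(p) {
  try {
    // optimistic guess: most paths are plain files
    await fsPromises.unlink(p);
  } catch (er) {
    if (er.code === 'ENOENT') return;                        // already gone
    if (er.code === 'EPERM' || er.code === 'EISDIR') {
      // guessed wrong: treat it as a directory instead
      return fsPromises.rm(p, { recursive: true, force: true });
    }
    throw er;                                                // anything else is a real failure
  }
}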
+ options.rmdir(p, er => { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) } -/** - * Removes files asynchronously. - * - * @param {Object} fdPath - * @param {Function} next - * @private - */ -function _removeFileAsync(fdPath, next) { - const _handler = function (err) { - if (err && !_isENOENT(err)) { - // reraise any unanticipated error - return next(err); - } - next(); - }; +const rmkids = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') - if (0 <= fdPath[0]) - fs.close(fdPath[0], function () { - fs.unlink(fdPath[1], _handler); - }); - else fs.unlink(fdPath[1], _handler); + options.readdir(p, (er, files) => { + if (er) + return cb(er) + let n = files.length + if (n === 0) + return options.rmdir(p, cb) + let errState + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) } -/** - * Removes files synchronously. - * - * @param {Object} fdPath - * @private - */ -function _removeFileSync(fdPath) { - let rethrownException = null; - try { - if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); - } catch (e) { - // reraise any unanticipated error - if (!_isEBADF(e) && !_isENOENT(e)) throw e; - } finally { +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +const rimrafSync = (p, options) => { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + let results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { try { - fs.unlinkSync(fdPath[1]); - } - catch (e) { - // reraise any unanticipated error - if (!_isENOENT(e)) rethrownException = e; + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) } } - if (rethrownException !== null) { - throw rethrownException; - } -} - -/** - * Prepares the callback for removal of the temporary file. - * - * Returns either a sync callback or a async callback depending on whether - * fileSync or file was called, which is expressed by the sync parameter. - * - * @param {string} name the path of the file - * @param {number} fd file descriptor - * @param {Object} opts - * @param {boolean} sync - * @returns {fileCallback | fileCallbackSync} - * @private - */ -function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { - const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); - const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); - - if (!opts.keep) _removeObjects.unshift(removeCallbackSync); - - return sync ? removeCallbackSync : removeCallback; -} - -/** - * Prepares the callback for removal of the temporary directory. - * - * Returns either a sync callback or a async callback depending on whether - * tmpFileSync or tmpFile was called, which is expressed by the sync parameter. - * - * @param {string} name - * @param {Object} opts - * @param {boolean} sync - * @returns {Function} the callback - * @private - */ -function _prepareTmpDirRemoveCallback(name, opts, sync) { - const removeFunction = opts.unsafeCleanup ? 
rimraf : fs.rmdir.bind(fs); - const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; - const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); - const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); - if (!opts.keep) _removeObjects.unshift(removeCallbackSync); - - return sync ? removeCallbackSync : removeCallback; -} -/** - * Creates a guarded function wrapping the removeFunction call. - * - * The cleanup callback is save to be called multiple times. - * Subsequent invocations will be ignored. - * - * @param {Function} removeFunction - * @param {string} fileOrDirName - * @param {boolean} sync - * @param {cleanupCallbackSync?} cleanupCallbackSync - * @returns {cleanupCallback | cleanupCallbackSync} - * @private - */ -function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { - let called = false; + if (!results.length) + return - // if sync is true, the next parameter will be ignored - return function _cleanupCallback(next) { + for (let i = 0; i < results.length; i++) { + const p = results[i] - /* istanbul ignore else */ - if (!called) { - // remove cleanupCallback from cache - const toRemove = cleanupCallbackSync || _cleanupCallback; - const index = _removeObjects.indexOf(toRemove); - /* istanbul ignore else */ - if (index >= 0) _removeObjects.splice(index, 1); + let st + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return - called = true; - if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { - return removeFunction(fileOrDirName); - } else { - return removeFunction(fileOrDirName, next || function() {}); - } + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) } - }; -} - -/** - * The garbage collector. - * - * @private - */ -function _garbageCollector() { - /* istanbul ignore else */ - if (!_gracefulCleanup) return; - // the function being called removes itself from _removeObjects, - // loop until _removeObjects is empty - while (_removeObjects.length) { try { - _removeObjects[0](); - } catch (e) { - // already removed? + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) } } } -/** - * Random name generator based on crypto. 
- * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript - * - * @param {number} howMany - * @returns {string} the generated random name - * @private - */ -function _randomChars(howMany) { - let - value = [], - rnd = null; +const rmdirSync = (p, options, originalEr) => { + assert(p) + assert(options) - // make sure that we do not fail because we ran out of entropy try { - rnd = crypto.randomBytes(howMany); - } catch (e) { - rnd = crypto.pseudoRandomBytes(howMany); - } - - for (var i = 0; i < howMany; i++) { - value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) } - - return value.join(''); } -/** - * Helper which determines whether a string s is blank, that is undefined, or empty or null. - * - * @private - * @param {string} s - * @returns {Boolean} true whether the string s is blank, false otherwise - */ -function _isBlank(s) { - return s === null || _isUndefined(s) || !s.trim(); -} +const rmkidsSync = (p, options) => { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) -/** - * Checks whether the `obj` parameter is defined or not. - * - * @param {Object} obj - * @returns {boolean} true if the object is undefined - * @private - */ -function _isUndefined(obj) { - return typeof obj === 'undefined'; + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + const retries = isWindows ? 100 : 1 + let i = 0 + do { + let threw = true + try { + const ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) } -/** - * Parses the function arguments. - * - * This function helps to have optional arguments. - * - * @param {(Options|null|undefined|Function)} options - * @param {?Function} callback - * @returns {Array} parsed arguments - * @private - */ -function _parseArguments(options, callback) { - /* istanbul ignore else */ - if (typeof options === 'function') { - return [{}, options]; - } +module.exports = rimraf +rimraf.sync = rimrafSync - /* istanbul ignore else */ - if (_isUndefined(options)) { - return [{}, callback]; - } - // copy options so we do not leak the changes we make internally - const actualOptions = {}; - for (const key of Object.getOwnPropertyNames(options)) { - actualOptions[key] = options[key]; - } +/***/ }), - return [actualOptions, callback]; -} +/***/ 16: +/***/ (function(module) { -/** - * Generates a new temporary name. 
- * - * @param {Object} opts - * @returns {string} the new random name according to opts - * @private - */ -function _generateTmpName(opts) { +module.exports = require("tls"); - const tmpDir = opts.tmpdir; +/***/ }), - /* istanbul ignore else */ - if (!_isUndefined(opts.name)) - return path.join(tmpDir, opts.dir, opts.name); +/***/ 49: +/***/ (function(module, __unusedexports, __webpack_require__) { - /* istanbul ignore else */ - if (!_isUndefined(opts.template)) - return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); +var wrappy = __webpack_require__(11) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) - // prefix and postfix - const name = [ - opts.prefix ? opts.prefix : 'tmp', - '-', - process.pid, - '-', - _randomChars(12), - opts.postfix ? '-' + opts.postfix : '' - ].join(''); +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) - return path.join(tmpDir, opts.dir, name); + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f } + +/***/ }), + +/***/ 82: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); /** - * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing - * options. - * - * @param {Options} options - * @private + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string */ -function _assertAndSanitizeOptions(options) { +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +//# sourceMappingURL=utils.js.map - options.tmpdir = _getTmpDir(options); +/***/ }), - const tmpDir = options.tmpdir; +/***/ 87: +/***/ (function(module) { - /* istanbul ignore else */ - if (!_isUndefined(options.name)) - _assertIsRelative(options.name, 'name', tmpDir); - /* istanbul ignore else */ - if (!_isUndefined(options.dir)) - _assertIsRelative(options.dir, 'dir', tmpDir); - /* istanbul ignore else */ - if (!_isUndefined(options.template)) { - _assertIsRelative(options.template, 'template', tmpDir); - if (!options.template.match(TEMPLATE_PATTERN)) - throw new Error(`Invalid template, found "${options.template}".`); - } - /* istanbul ignore else */ - if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) - throw new Error(`Invalid tries, found "${options.tries}".`); +module.exports = require("os"); - // if a name was specified we will try once - options.tries = _isUndefined(options.name) ? 
options.tries || DEFAULT_TRIES : 1; - options.keep = !!options.keep; - options.detachDescriptor = !!options.detachDescriptor; - options.discardDescriptor = !!options.discardDescriptor; - options.unsafeCleanup = !!options.unsafeCleanup; +/***/ }), - // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to - options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); - options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); - // sanitize further if template is relative to options.dir - options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); +/***/ 93: +/***/ (function(module, __unusedexports, __webpack_require__) { - // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to - options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); - options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; - options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = __webpack_require__(622) +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __webpack_require__(306) + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } } -/** - * Resolve the specified path name in respect to tmpDir. - * - * The specified name might include relative path components, e.g. ../ - * so we need to resolve in order to be sure that is is located inside tmpDir - * - * @param name - * @param tmpDir - * @returns {string} - * @private - */ -function _resolvePath(name, tmpDir) { - const sanitizedName = _sanitizeName(name); - if (sanitizedName.startsWith(tmpDir)) { - return path.resolve(sanitizedName); - } else { - return path.resolve(path.join(tmpDir, sanitizedName)); - } +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) } -/** - * Sanitize the specified path name by removing all quote characters. - * - * @param name - * @returns {string} - * @private - */ -function _sanitizeName(name) { - if (_isBlank(name)) { - return name; +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) } - return name.replace(/["']/g, ''); } -/** - * Asserts whether specified name is relative to the specified tmpDir. 
- * - * @param {string} name - * @param {string} option - * @param {string} tmpDir - * @throws {Error} - * @private - */ -function _assertIsRelative(name, option, tmpDir) { - if (option === 'name') { - // assert that name is not absolute and does not contain a path - if (path.isAbsolute(name)) - throw new Error(`${option} option must not contain an absolute path, found "${name}".`); - // must not fail on valid . or .. or similar such constructs - let basename = path.basename(name); - if (basename === '..' || basename === '.' || basename !== name) - throw new Error(`${option} option must not contain a path, found "${name}".`); +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) } - else { // if (option === 'dir' || option === 'template') { - // assert that dir or template are relative to tmpDir - if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { - throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); - } - let resolvedPath = _resolvePath(name, tmpDir); - if (!resolvedPath.startsWith(tmpDir)) - throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) } -} -/** - * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. - * - * @private - */ -function _isEBADF(error) { - return _isExpectedError(error, -EBADF, 'EBADF'); + return m } -/** - * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. - * - * @private - */ -function _isENOENT(error) { - return _isExpectedError(error, -ENOENT, 'ENOENT'); +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch } -/** - * Helper to determine whether the expected error code matches the actual code and errno, - * which will differ between the supported node versions. - * - * - Node >= 7.0: - * error.code {string} - * error.errno {number} any numerical value will be negated - * - * CAVEAT - * - * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT - * is no different here. - * - * @param {SystemError} error - * @param {number} errno - * @param {string} code - * @private - */ -function _isExpectedError(error, errno, code) { - return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; -} +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } -/** - * Sets the graceful cleanup. - * - * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the - * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary - * object removals. - */ -function setGracefulCleanup() { - _gracefulCleanup = true; -} + if (!options) options = {} -/** - * Returns the currently configured tmp dir from os.tmpdir(). 
- * - * @private - * @param {?Options} options - * @returns {string} the currently configured tmp dir - */ -function _getTmpDir(options) { - return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) } -// Install process exit listener -process.addListener(EXIT, _garbageCollector); +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } -/** - * Configuration options. - * - * @typedef {Object} Options - * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected - * @property {?number} tries the number of tries before give up the name generation - * @property (?int) mode the access mode, defaults are 0o700 for directories and 0o600 for files - * @property {?string} template the "mkstemp" like filename template - * @property {?string} name fixed name relative to tmpdir or the specified dir option - * @property {?string} dir tmp directory relative to the root tmp directory in use - * @property {?string} prefix prefix for the generated name - * @property {?string} postfix postfix for the generated name - * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir - * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty - * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection - * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection - */ + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } -/** - * @typedef {Object} FileSyncObject - * @property {string} name the name of the file - * @property {string} fd the file descriptor or -1 if the fd has been discarded - * @property {fileCallback} removeCallback the callback function to remove the file - */ + if (!options) options = {} + pattern = pattern.trim() -/** - * @typedef {Object} DirSyncObject - * @property {string} name the name of the directory - * @property {fileCallback} removeCallback the callback function to remove the directory - */ + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } -/** - * @callback tmpNameCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - */ + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false -/** - * @callback fileCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {number} fd the file descriptor or -1 if the fd had been discarded - * @param {cleanupCallback} fn the cleanup callback function - */ + // make the set of regexps etc. 
+ this.make() +} -/** - * @callback fileCallbackSync - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {number} fd the file descriptor or -1 if the fd had been discarded - * @param {cleanupCallbackSync} fn the cleanup callback function - */ +Minimatch.prototype.debug = function () {} -/** - * @callback dirCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {cleanupCallback} fn the cleanup callback function - */ +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return -/** - * @callback dirCallbackSync - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {cleanupCallbackSync} fn the cleanup callback function - */ + var pattern = this.pattern + var options = this.options -/** - * Removes the temporary created file or directory. - * - * @callback cleanupCallback - * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed - */ + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } -/** - * Removes the temporary created file or directory. - * - * @callback cleanupCallbackSync - */ + // step 1: figure out negation, etc. + this.parseNegate() -/** - * Callback function for function composition. - * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} - * - * @callback simpleCallback - */ + // step 2: expand braces + var set = this.globSet = this.braceExpand() -// exporting all the needed methods + if (options.debug) this.debug = console.error -// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will -// allow users to reconfigure the temporary directory -Object.defineProperty(module.exports, 'tmpdir', { - enumerable: true, - configurable: false, - get: function () { - return _getTmpDir(); - } -}); + this.debug(this.pattern, set) -module.exports.dir = dir; -module.exports.dirSync = dirSync; + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) -module.exports.file = file; -module.exports.fileSync = fileSync; + this.debug(this.pattern, set) -module.exports.tmpName = tmpName; -module.exports.tmpNameSync = tmpNameSync; + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) -module.exports.setGracefulCleanup = setGracefulCleanup; + this.debug(this.pattern, set) + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) -/***/ }), + this.debug(this.pattern, set) -/***/ 30: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + this.set = set +} -"use strict"; +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getUploadSpecification = void 0; -const fs = __importStar(__webpack_require__(747)); -const core_1 = __webpack_require__(676); -const path_1 = __webpack_require__(622); -const path_and_artifact_name_validation_1 = __webpack_require__(547); -/** - * Creates a specification that describes how each file that is part of the artifact will be uploaded - * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server - * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file - * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact - */ -function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { - // artifact name was checked earlier on, no need to check again - const specifications = []; - if (!fs.existsSync(rootDirectory)) { - throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); - } - if (!fs.lstatSync(rootDirectory).isDirectory()) { - throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); - } - // Normalize and resolve, this allows for either absolute or relative paths to be used - rootDirectory = path_1.normalize(rootDirectory); - rootDirectory = path_1.resolve(rootDirectory); - /* - Example to demonstrate behavior - - Input: - artifactName: my-artifact - rootDirectory: '/home/user/files/plz-upload' - artifactFiles: [ - '/home/user/files/plz-upload/file1.txt', - '/home/user/files/plz-upload/file2.txt', - '/home/user/files/plz-upload/dir/file3.txt' - ] - - Output: - specifications: [ - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] - ] - */ - for (let file of artifactFiles) { - if (!fs.existsSync(file)) { - throw new Error(`File ${file} does not exist`); - } - if (!fs.lstatSync(file).isDirectory()) { - // Normalize and resolve, this allows for either absolute or relative paths to be used - file = path_1.normalize(file); - file = path_1.resolve(file); - if (!file.startsWith(rootDirectory)) { - throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); - } - // Check for forbidden characters in file paths that will be rejected during upload - const uploadPath = file.replace(rootDirectory, ''); - path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath); - /* - uploadFilePath denotes where the file will be uploaded in the file container on the server. 
During a run, if multiple artifacts are uploaded, they will all - be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts - - path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt - join('artifact-name/', 'file-to-upload.txt') - join('artifact-name/', '/file-to-upload.txt') - join('artifact-name', 'file-to-upload.txt') - join('artifact-name', '/file-to-upload.txt') - */ - specifications.push({ - absoluteFilePath: file, - uploadFilePath: path_1.join(artifactName, uploadPath) - }); - } - else { - // Directories are rejected by the server during upload - core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); - } - } - return specifications; -} -exports.getUploadSpecification = getUploadSpecification; -//# sourceMappingURL=upload-specification.js.map - -/***/ }), - -/***/ 50: -/***/ (function(module) { + if (options.nonegate) return -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} - return wrapper +Minimatch.prototype.braceExpand = braceExpand - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} } - return ret } -} + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern -/***/ }), + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } -/***/ 51: -/***/ (function(module, __unusedexports, __webpack_require__) { + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. 
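// Brace expansion, described in the comment block above, turns a single pattern into a
// list of concrete patterns before any matching happens. A short usage sketch of the
// braceExpand export defined here, requiring the published minimatch package rather than
// the bundled module id; the patterns are the examples from that comment.
const minimatch = require('minimatch');

console.log(minimatch.braceExpand('a{b,c}d'));   // [ 'abd', 'acd' ]
console.log(minimatch.braceExpand('a{0..3}d'));  // [ 'a0d', 'a1d', 'a2d', 'a3d' ]
console.log(minimatch.braceExpand('a{2..}b'));   // [ 'a{2..}b' ]  -- invalid set, left as-is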
+ return [pattern] + } -module.exports = globSync -globSync.GlobSync = GlobSync + return expand(pattern) +} -var fs = __webpack_require__(747) -var rp = __webpack_require__(909) -var minimatch = __webpack_require__(727) -var Minimatch = minimatch.Minimatch -var Glob = __webpack_require__(93).Glob -var util = __webpack_require__(669) -var path = __webpack_require__(622) -var assert = __webpack_require__(357) -var isAbsolute = __webpack_require__(409) -var common = __webpack_require__(459) -var alphasort = common.alphasort -var alphasorti = common.alphasorti -var setopts = common.setopts -var ownProp = common.ownProp -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } -function globSync (pattern, options) { - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') + var options = this.options - return new GlobSync(pattern, options).found -} + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' -function GlobSync (pattern, options) { - if (!pattern) - throw new Error('must provide pattern') + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options) + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) - setopts(this, pattern, options) + // skip over any that are escaped. 
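// Backslash escaping, which the parser loop handles via the `escaping` flag, makes a
// glob metacharacter match itself literally. A few concrete calls against the published
// minimatch package; the file names are examples only.
const minimatch = require('minimatch');

console.log(minimatch('a*b', 'a\\*b'));  // true  -- the escaped * only matches a literal *
console.log(minimatch('aXb', 'a\\*b'));  // false -- no wildcard behaviour once escaped
console.log(minimatch('aXb', 'a*b'));    // true  -- an unescaped * is a wildcard again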
+ if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } - if (this.noprocess) - return this + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false - var n = this.minimatch.set.length - this.matches = new Array(n) - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false) - } - this._finish() -} + case '\\': + clearStateChar() + escaping = true + continue -GlobSync.prototype._finish = function () { - assert(this instanceof GlobSync) - if (this.realpath) { - var self = this - this.matches.forEach(function (matchset, index) { - var set = self.matches[index] = Object.create(null) - for (var p in matchset) { - try { - p = self._makeAbs(p) - var real = rp.realpathSync(p, self.realpathCache) - set[real] = true - } catch (er) { - if (er.syscall === 'stat') - set[self._makeAbs(p)] = true - else - throw er + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue } - } - }) - } - common.finish(this) -} + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue -GlobSync.prototype._process = function (pattern, index, inGlobStar) { - assert(this instanceof GlobSync) + case '(': + if (inClass) { + re += '(' + continue + } - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. + if (!stateChar) { + re += '\\(' + continue + } - // See if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index) - return + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } - default: - // pattern has some string bits in the front. 
- // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue - var remain = pattern.slice(n) + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix + clearStateChar() + re += '|' + continue - var abs = this._makeAbs(read) + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() - //if ignored, skip processing - if (childrenIgnored(this, read)) - return + if (inClass) { + re += '\\' + c + continue + } - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar) -} + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } -GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar) + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } - // if the abs isn't a dir, then nothing can match! - if (!entries) - return + // finish up the class. + hasMagic = true + inClass = false + re += c + continue - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' + default: + // swallow any state char that wasn't consumed + clearStateChar() - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' 
|| dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return + re += c - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. + } // switch + } // for - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix.slice(-1) !== '/') - e = prefix + '/' + e - else - e = prefix + e + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' } - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return - } + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) - // now test all matched entries as stand-ins for that part - // of the pattern. - remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) - newPattern = [prefix, e] - else - newPattern = [e] - this._process(newPattern.concat(remain), index, inGlobStar) + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail } -} + // handle trailing things that only matter at the very end. 
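// Character classes, handled in the '[' / ']' cases above, translate more or less directly
// into RegExp classes, with [!...] meaning negation. A few concrete calls against the
// published minimatch package; the file names are examples only.
const minimatch = require('minimatch');

console.log(minimatch('file1.txt', 'file[0-9].txt'));   // true
console.log(minimatch('fileA.txt', 'file[0-9].txt'));   // false
console.log(minimatch('fileA.txt', 'file[!0-9].txt'));  // true -- [!...] negates the class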
+ clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } -GlobSync.prototype._emitMatch = function (index, e) { - if (isIgnored(this, e)) - return + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } - var abs = this._makeAbs(e) + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] - if (this.mark) - e = this._mark(e) + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) - if (this.absolute) { - e = abs - } + nlLast += nlAfter - if (this.matches[index][e]) - return + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe } - this.matches[index][e] = true + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } - if (this.stat) - this._stat(e) -} + if (addPatternStart) { + re = patternStart + re + } + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } -GlobSync.prototype._readdirInGlobStar = function (abs) { - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false) + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } - var entries - var lstat - var stat + var flags = options.nocase ? 'i' : '' try { - lstat = fs.lstatSync(abs) + var regExp = new RegExp('^' + re + '$', flags) } catch (er) { - if (er.code === 'ENOENT') { - // lstat failed, doesn't exist - return null - } + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') } - var isSym = lstat && lstat.isSymbolicLink() - this.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. 
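// _emitMatch above is where the per-match options take effect: mark appends a slash to
// directories, absolute rewrites the path, nodir drops directories entirely. A usage
// sketch of the synchronous glob entry point with those options; the pattern and cwd
// are examples only.
const glob = require('glob');

const files = glob.sync('**/*.js', {
  cwd: __dirname,   // resolve matches relative to this directory
  nodir: true,      // skip directories, only return files
  absolute: true    // return absolute paths instead of cwd-relative ones
});
console.log(`${files.length} matches`);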
- if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = 'FILE' - else - entries = this._readdir(abs, false) + regExp._glob = pattern + regExp._src = re - return entries + return regExp } -GlobSync.prototype._readdir = function (abs, inGlobStar) { - var entries +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs) +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return null + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set - if (Array.isArray(c)) - return c + if (!set.length) { + this.regexp = false + return this.regexp } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' try { - return this._readdirEntries(abs, fs.readdirSync(abs)) - } catch (er) { - this._readdirError(abs, er) - return null + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false } + return this.regexp } -GlobSync.prototype._readdirEntries = function (abs, entries) { - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) } + return list +} - this.cache[abs] = entries +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' - // mark and cache dir-ness - return entries -} + if (f === '/' && partial) return true -GlobSync.prototype._readdirError = function (f, er) { - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. 
- var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - throw error - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break + var options = this.options - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) - throw er - if (!this.silent) - console.error('glob error', er) - break + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') } -} -GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { - - var entries = this._readdir(abs, inGlobStar) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return - - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false) - - var len = entries.length - var isSym = this.symlinks[abs] - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true) + var set = this.set + this.debug(this.pattern, 'set', set) - var below = gspref.concat(entries[i], remain) - this._process(below, index, true) + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break } -} - -GlobSync.prototype._processSimple = function (prefix, index) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var exists = this._stat(prefix) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate } } - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. 
+ if (options.flipNegate) return false + return this.negate } -// Returns either 'DIR', 'FILE', or false -GlobSync.prototype._stat = function (f) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return false +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) - if (Array.isArray(c)) - c = 'DIR' + this.debug('matchOne', file.length, pattern.length) - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return c + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] - if (needDir && c === 'FILE') - return false + this.debug(pattern, p, f) - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false - var exists - var stat = this.statCache[abs] - if (!stat) { - var lstat - try { - lstat = fs.lstatSync(abs) - } catch (er) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return false - } - } + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) - if (lstat && lstat.isSymbolicLink()) { - try { - stat = fs.statSync(abs) - } catch (er) { - stat = lstat + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true } - } else { - stat = lstat - } - } - - this.statCache[abs] = stat - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] - this.cache[abs] = this.cache[abs] || c + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) - if (needDir && c === 'FILE') - return false + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. 
+ // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } - return c -} + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } -GlobSync.prototype._mark = function (p) { - return common.mark(this, p) -} - -GlobSync.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} - - -/***/ }), - -/***/ 87: -/***/ (function(module) { - -module.exports = require("os"); - -/***/ }), - -/***/ 93: -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Approach: -// -// 1. Get the minimatch set -// 2. For each pattern in the set, PROCESS(pattern, false) -// 3. Store matches per-set, then uniq them -// -// PROCESS(pattern, inGlobStar) -// Get the first [n] items from pattern that are all strings -// Join these together. This is PREFIX. -// If there is no more remaining, then stat(PREFIX) and -// add to matches if it succeeds. END. -// -// If inGlobStar and PREFIX is symlink and points to dir -// set ENTRIES = [] -// else readdir(PREFIX) as ENTRIES -// If fail, END -// -// with ENTRIES -// If pattern[n] is GLOBSTAR -// // handle the case where the globstar match is empty -// // by pruning it out, and testing the resulting pattern -// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) -// // handle other cases. -// for ENTRY in ENTRIES (not dotfiles) -// // attach globstar + tail onto the entry -// // Mark that this entry is a globstar match -// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) -// -// else // not globstar -// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) -// Test ENTRY against pattern[n] -// If fails, continue -// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) -// -// Caveat: -// Cache all stats and readdirs results to minimize syscall. Since all -// we ever care about is existence and directory-ness, we can just keep -// `true` for files, and [children,...] for directories, or `false` for -// things that don't exist. - -module.exports = glob - -var fs = __webpack_require__(747) -var rp = __webpack_require__(909) -var minimatch = __webpack_require__(727) -var Minimatch = minimatch.Minimatch -var inherits = __webpack_require__(919) -var EE = __webpack_require__(614).EventEmitter -var path = __webpack_require__(622) -var assert = __webpack_require__(357) -var isAbsolute = __webpack_require__(409) -var globSync = __webpack_require__(51) -var common = __webpack_require__(459) -var alphasort = common.alphasort -var alphasorti = common.alphasorti -var setopts = common.setopts -var ownProp = common.ownProp -var inflight = __webpack_require__(691) -var util = __webpack_require__(669) -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -var once = __webpack_require__(731) - -function glob (pattern, options, cb) { - if (typeof options === 'function') cb = options, options = {} - if (!options) options = {} - - if (options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return globSync(pattern, options) - } - - return new Glob(pattern, options, cb) -} - -glob.sync = globSync -var GlobSync = glob.GlobSync = globSync.GlobSync - -// old api surface -glob.glob = glob + // no match was found. + // However, in partial mode, we can't say this is necessarily over. 
+ // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } -function extend (origin, add) { - if (add === null || typeof add !== 'object') { - return origin - } + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p + } + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } - var keys = Object.keys(add) - var i = keys.length - while (i--) { - origin[keys[i]] = add[keys[i]] + if (!hit) return false } - return origin -} - -glob.hasMagic = function (pattern, options_) { - var options = extend({}, options_) - options.noprocess = true - - var g = new Glob(pattern, options) - var set = g.minimatch.set - if (!pattern) - return false + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* - if (set.length > 1) + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! return true - - for (var j = 0; j < set[0].length; j++) { - if (typeof set[0][j] !== 'string') - return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd } - return false + // should be unreachable. + throw new Error('wtf?') } -glob.Glob = Glob -inherits(Glob, EE) -function Glob (pattern, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - - if (options && options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return new GlobSync(pattern, options) - } +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb) +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} - setopts(this, pattern, options) - this._didRealPath = false - // process each pattern in the minimatch set - var n = this.minimatch.set.length +/***/ }), - // The matches are stored as {: true,...} so that - // duplicates are automagically pruned. - // Later, we do an Object.keys() on these. - // Keep them as a list so we can fill in when nonull is set. 
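
// A minimal usage sketch of the matching rules implemented by match()/matchOne()
// above, written against the standalone minimatch package (the same code that is
// vendored into this bundle). The paths and patterns are made-up examples.
const minimatch = require('minimatch')

console.log(minimatch('a/b/x/y/z/c', 'a/**/b/**/c'))           // true: ** swallows whole segments, per the walkthrough above
console.log(minimatch('a/.cache/c', 'a/**/c'))                 // false: ** never matches dotfiles by default
console.log(minimatch('a/.cache/c', 'a/**/c', { dot: true }))  // true: options.dot relaxes the dotfile rule
console.log(minimatch.match(['a.js', 'b.ts', 'c.js'], '*.js')) // [ 'a.js', 'c.js' ]
console.log(minimatch.makeRe('*.js'))                          // the anchored RegExp built by makeRe() above
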
- this.matches = new Array(n) +/***/ 102: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - if (typeof cb === 'function') { - cb = once(cb) - this.on('error', cb) - this.on('end', function (matches) { - cb(null, matches) - }) - } +"use strict"; - var self = this - this._processing = 0 +// For internal use, subject to change. +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__webpack_require__(747)); +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(82); +function issueCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueCommand = issueCommand; +//# sourceMappingURL=file-command.js.map - this._emitQueue = [] - this._processQueue = [] - this.paused = false +/***/ }), - if (this.noprocess) - return this +/***/ 112: +/***/ (function(__unusedmodule, exports) { - if (n === 0) - return done() - - var sync = true - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false, done) - } - sync = false - - function done () { - --self._processing - if (self._processing <= 0) { - if (sync) { - process.nextTick(function () { - self._finish() - }) - } else { - self._finish() - } - } - } -} - -Glob.prototype._finish = function () { - assert(this instanceof Glob) - if (this.aborted) - return - - if (this.realpath && !this._didRealpath) - return this._realpath() - - common.finish(this) - this.emit('end', this.found) -} - -Glob.prototype._realpath = function () { - if (this._didRealpath) - return - - this._didRealpath = true - - var n = this.matches.length - if (n === 0) - return this._finish() - - var self = this - for (var i = 0; i < this.matches.length; i++) - this._realpathSet(i, next) - - function next () { - if (--n === 0) - self._finish() - } -} - -Glob.prototype._realpathSet = function (index, cb) { - var matchset = this.matches[index] - if (!matchset) - return cb() - - var found = Object.keys(matchset) - var self = this - var n = found.length - - if (n === 0) - return cb() - - var set = this.matches[index] = Object.create(null) - found.forEach(function (p, i) { - // If there's a problem with the stat, then it means that - // one or more of the links in the realpath couldn't be - // resolved. just return the abs value in that case. 
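
// A minimal sketch of the file-command mechanism used by issueCommand() above:
// the runner exposes a file path in GITHUB_<COMMAND> and the toolkit appends one
// command per line. The temp file and MY_VAR value are made-up for the demo; on a
// real runner GITHUB_ENV is already set.
const fs = require('fs')
const os = require('os')
const path = require('path')

const envFile = path.join(os.tmpdir(), 'github_env_demo')
fs.writeFileSync(envFile, '')
process.env['GITHUB_ENV'] = envFile

const command = 'ENV'
const filePath = process.env[`GITHUB_${command}`]  // same lookup issueCommand() performs
fs.appendFileSync(filePath, `MY_VAR=hello${os.EOL}`, { encoding: 'utf8' })

console.log(fs.readFileSync(envFile, 'utf8'))      // "MY_VAR=hello"
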
- p = self._makeAbs(p) - rp.realpath(p, self.realpathCache, function (er, real) { - if (!er) - set[real] = true - else if (er.syscall === 'stat') - set[p] = true - else - self.emit('error', er) // srsly wtf right here - - if (--n === 0) { - self.matches[index] = set - cb() - } - }) - }) -} - -Glob.prototype._mark = function (p) { - return common.mark(this, p) -} - -Glob.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} - -Glob.prototype.abort = function () { - this.aborted = true - this.emit('abort') -} - -Glob.prototype.pause = function () { - if (!this.paused) { - this.paused = true - this.emit('pause') - } -} - -Glob.prototype.resume = function () { - if (this.paused) { - this.emit('resume') - this.paused = false - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0) - this._emitQueue.length = 0 - for (var i = 0; i < eq.length; i ++) { - var e = eq[i] - this._emitMatch(e[0], e[1]) - } - } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0) - this._processQueue.length = 0 - for (var i = 0; i < pq.length; i ++) { - var p = pq[i] - this._processing-- - this._process(p[0], p[1], p[2], p[3]) - } - } - } -} - -Glob.prototype._process = function (pattern, index, inGlobStar, cb) { - assert(this instanceof Glob) - assert(typeof cb === 'function') - - if (this.aborted) - return - - this._processing++ - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]) - return - } - - //console.error('PROCESS %d', this._processing, pattern) - - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. - - // see if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index, cb) - return - - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break - - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } - - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix - - var abs = this._makeAbs(read) - - //if ignored, skip _processing - if (childrenIgnored(this, read)) - return cb() - - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) -} - -Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - -Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return cb() - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. 
- var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) - - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return cb() - - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. - - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return cb() - } - - // now test all matched entries as stand-ins for that part - // of the pattern. - remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - this._process([e].concat(remain), index, inGlobStar, cb) - } - cb() -} - -Glob.prototype._emitMatch = function (index, e) { - if (this.aborted) - return - - if (isIgnored(this, e)) - return - - if (this.paused) { - this._emitQueue.push([index, e]) - return - } - - var abs = isAbsolute(e) ? e : this._makeAbs(e) - - if (this.mark) - e = this._mark(e) - - if (this.absolute) - e = abs - - if (this.matches[index][e]) - return - - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } - - this.matches[index][e] = true - - var st = this.statCache[abs] - if (st) - this.emit('stat', e, st) - - this.emit('match', e) -} - -Glob.prototype._readdirInGlobStar = function (abs, cb) { - if (this.aborted) - return - - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false, cb) - - var lstatkey = 'lstat\0' + abs - var self = this - var lstatcb = inflight(lstatkey, lstatcb_) - - if (lstatcb) - fs.lstat(abs, lstatcb) - - function lstatcb_ (er, lstat) { - if (er && er.code === 'ENOENT') - return cb() - - var isSym = lstat && lstat.isSymbolicLink() - self.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. 
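
// A short usage sketch of the async Glob class above, written against the
// standalone glob package (the same code vendored here). Patterns and options are
// arbitrary examples; 'match' and 'end' are the events emitted by _emitMatch() and
// _finish() above.
const glob = require('glob')

// callback form: collect everything, skipping directories and ignored trees
glob('src/**/*.js', { nodir: true, ignore: 'node_modules/**' }, (er, files) => {
  if (er) throw er
  console.log(`${files.length} files matched`)
})

// event form: matches stream in as the walk proceeds; abort() stops it early
const g = new glob.Glob('**/*.md')
g.on('match', file => console.log('match:', file))
g.on('end', all => console.log('done,', all.length, 'total'))
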
- if (!isSym && lstat && !lstat.isDirectory()) { - self.cache[abs] = 'FILE' - cb() - } else - self._readdir(abs, false, cb) - } -} - -Glob.prototype._readdir = function (abs, inGlobStar, cb) { - if (this.aborted) - return - - cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) - if (!cb) - return - - //console.error('RD %j %j', +inGlobStar, abs) - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return cb() - - if (Array.isArray(c)) - return cb(null, c) - } - - var self = this - fs.readdir(abs, readdirCb(this, abs, cb)) -} - -function readdirCb (self, abs, cb) { - return function (er, entries) { - if (er) - self._readdirError(abs, er, cb) - else - self._readdirEntries(abs, entries, cb) - } -} - -Glob.prototype._readdirEntries = function (abs, entries, cb) { - if (this.aborted) - return - - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } - } - - this.cache[abs] = entries - return cb(null, entries) -} - -Glob.prototype._readdirError = function (f, er, cb) { - if (this.aborted) - return - - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - this.emit('error', error) - this.abort() - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) { - this.emit('error', er) - // If the error is handled, then we abort - // if not, we threw out of here - this.abort() - } - if (!this.silent) - console.error('glob error', er) - break - } - - return cb() -} - -Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - - -Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - //console.error('pgs2', prefix, remain[0], entries) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return cb() - - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false, cb) - - var isSym = this.symlinks[abs] - var len = entries.length - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return cb() - - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' 
&& !this.dot) - continue - - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true, cb) - - var below = gspref.concat(entries[i], remain) - this._process(below, index, true, cb) - } - - cb() -} - -Glob.prototype._processSimple = function (prefix, index, cb) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var self = this - this._stat(prefix, function (er, exists) { - self._processSimple2(prefix, index, er, exists, cb) - }) -} -Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { - - //console.error('ps2', prefix, exists) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return cb() - - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } - - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) - cb() -} - -// Returns either 'DIR', 'FILE', or false -Glob.prototype._stat = function (f, cb) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return cb() - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return cb(null, c) - - if (needDir && c === 'FILE') - return cb() - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (stat !== undefined) { - if (stat === false) - return cb(null, stat) - else { - var type = stat.isDirectory() ? 'DIR' : 'FILE' - if (needDir && type === 'FILE') - return cb() - else - return cb(null, type, stat) - } - } - - var self = this - var statcb = inflight('stat\0' + abs, lstatcb_) - if (statcb) - fs.lstat(abs, statcb) - - function lstatcb_ (er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - // If it's a symlink, then treat it as the target, unless - // the target does not exist, then treat it as a file. - return fs.stat(abs, function (er, stat) { - if (er) - self._stat2(f, abs, null, lstat, cb) - else - self._stat2(f, abs, er, stat, cb) - }) - } else { - self._stat2(f, abs, er, lstat, cb) - } - } -} - -Glob.prototype._stat2 = function (f, abs, er, stat, cb) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return cb() - } - - var needDir = f.slice(-1) === '/' - this.statCache[abs] = stat - - if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) - return cb(null, false, stat) - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return cb() - - return cb(null, c, stat) -} - - -/***/ }), - -/***/ 128: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.digestForStream = exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0; -const crypto_1 = __importDefault(__webpack_require__(417)); -const fs_1 = __webpack_require__(747); -const core_1 = __webpack_require__(676); -const http_client_1 = __webpack_require__(582); -const auth_1 = __webpack_require__(479); -const config_variables_1 = __webpack_require__(750); -const crc64_1 = __importDefault(__webpack_require__(335)); -/** - * Returns a retry time in milliseconds that exponentially gets larger - * depending on the amount of retries that have been attempted - */ -function getExponentialRetryTimeInMilliseconds(retryCount) { - if (retryCount < 0) { - throw new Error('RetryCount should not be negative'); - } - else if (retryCount === 0) { - return config_variables_1.getInitialRetryIntervalInMilliseconds(); - } - const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount; - const maxTime = minTime * config_variables_1.getRetryMultiplier(); - // returns a random number between the minTime (inclusive) and the maxTime (exclusive) - return Math.trunc(Math.random() * (maxTime - minTime) + minTime); -} -exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds; -/** - * Parses a env variable that is a number - */ -function parseEnvNumber(key) { - const value = Number(process.env[key]); - if (Number.isNaN(value) || value < 0) { - return undefined; - } - return value; -} -exports.parseEnvNumber = parseEnvNumber; -/** - * Various utility functions to help with the necessary API calls - */ -function getApiVersion() { - return '6.0-preview'; -} -exports.getApiVersion = getApiVersion; -function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode >= 200 && statusCode < 300; -} -exports.isSuccessStatusCode = isSuccessStatusCode; -function isForbiddenStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode === http_client_1.HttpCodes.Forbidden; -} -exports.isForbiddenStatusCode = isForbiddenStatusCode; -function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; - } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - 
http_client_1.HttpCodes.GatewayTimeout, - http_client_1.HttpCodes.InternalServerError, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.TooManyRequests, - 413 // Payload Too Large - ]; - return retryableStatusCodes.includes(statusCode); -} -exports.isRetryableStatusCode = isRetryableStatusCode; -function isThrottledStatusCode(statusCode) { - if (!statusCode) { - return false; - } - return statusCode === http_client_1.HttpCodes.TooManyRequests; -} -exports.isThrottledStatusCode = isThrottledStatusCode; -/** - * Attempts to get the retry-after value from a set of http headers. The retry time - * is originally denoted in seconds, so if present, it is converted to milliseconds - * @param headers all the headers received when making an http call - */ -function tryGetRetryAfterValueTimeInMilliseconds(headers) { - if (headers['retry-after']) { - const retryTime = Number(headers['retry-after']); - if (!isNaN(retryTime)) { - core_1.info(`Retry-After header is present with a value of ${retryTime}`); - return retryTime * 1000; - } - core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`); - return undefined; - } - core_1.info(`No retry-after header was found. Dumping all headers for diagnostic purposes`); - // eslint-disable-next-line no-console - console.log(headers); - return undefined; -} -exports.tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds; -function getContentRange(start, end, total) { - // Format: `bytes start-end/fileSize - // start and end are inclusive - // For a 200 byte chunk starting at byte 0: - // Content-Range: bytes 0-199/200 - return `bytes ${start}-${end}/${total}`; -} -exports.getContentRange = getContentRange; -/** - * Sets all the necessary headers when downloading an artifact - * @param {string} contentType the type of content being uploaded - * @param {boolean} isKeepAlive is the same connection being used to make multiple calls - * @param {boolean} acceptGzip can we accept a gzip encoded response - * @param {string} acceptType the type of content that we can accept - * @returns appropriate headers to make a specific http call during artifact download - */ -function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) { - const requestOptions = {}; - if (contentType) { - requestOptions['Content-Type'] = contentType; - } - if (isKeepAlive) { - requestOptions['Connection'] = 'Keep-Alive'; - // keep alive for at least 10 seconds before closing the connection - requestOptions['Keep-Alive'] = '10'; - } - if (acceptGzip) { - // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header - requestOptions['Accept-Encoding'] = 'gzip'; - requestOptions['Accept'] = `application/octet-stream;api-version=${getApiVersion()}`; - } - else { - // default to application/json if we are not working with gzip content - requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; - } - return requestOptions; -} -exports.getDownloadHeaders = getDownloadHeaders; -/** - * Sets all the necessary headers when uploading an artifact - * @param {string} contentType the type of content being uploaded - * @param {boolean} isKeepAlive is the same connection being used to make multiple calls - * @param {boolean} isGzip is the connection being used to upload GZip compressed content - * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed - * @param {number} contentLength the 
length of the content that is being uploaded - * @param {string} contentRange the range of the content that is being uploaded - * @returns appropriate headers to make a specific http call during artifact upload - */ -function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange, digest) { - const requestOptions = {}; - requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; - if (contentType) { - requestOptions['Content-Type'] = contentType; - } - if (isKeepAlive) { - requestOptions['Connection'] = 'Keep-Alive'; - // keep alive for at least 10 seconds before closing the connection - requestOptions['Keep-Alive'] = '10'; - } - if (isGzip) { - requestOptions['Content-Encoding'] = 'gzip'; - requestOptions['x-tfs-filelength'] = uncompressedLength; - } - if (contentLength) { - requestOptions['Content-Length'] = contentLength; - } - if (contentRange) { - requestOptions['Content-Range'] = contentRange; - } - if (digest) { - requestOptions['x-actions-results-crc64'] = digest.crc64; - requestOptions['x-actions-results-md5'] = digest.md5; - } - return requestOptions; -} -exports.getUploadHeaders = getUploadHeaders; -function createHttpClient(userAgent) { - return new http_client_1.HttpClient(userAgent, [ - new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken()) - ]); -} -exports.createHttpClient = createHttpClient; -function getArtifactUrl() { - const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`; - core_1.debug(`Artifact Url: ${artifactUrl}`); - return artifactUrl; -} -exports.getArtifactUrl = getArtifactUrl; -/** - * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information - * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but - * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only - * the information that we want. - * - * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided. - * Other information such as the headers, the response code and message might be useful, so this is displayed. 
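
// A worked example of the retry/backoff arithmetic and Content-Range format used
// above. The 3000 ms initial interval and 1.5 multiplier are assumed sample values
// standing in for whatever config_variables returns, not the actual configuration.
const initialIntervalMs = 3000
const multiplier = 1.5

function exponentialRetryMs(retryCount) {
  if (retryCount === 0) return initialIntervalMs
  const minTime = initialIntervalMs * multiplier * retryCount
  const maxTime = minTime * multiplier
  // random value in [minTime, maxTime), mirroring getExponentialRetryTimeInMilliseconds()
  return Math.trunc(Math.random() * (maxTime - minTime) + minTime)
}

console.log(exponentialRetryMs(0)) // 3000
console.log(exponentialRetryMs(1)) // somewhere in [4500, 6750)
console.log(exponentialRetryMs(2)) // somewhere in [9000, 13500)

// Content-Range for a 200 byte chunk starting at byte 0 (start/end are inclusive),
// exactly what getContentRange(0, 199, 200) builds:
console.log(`bytes ${0}-${199}/${200}`) // "bytes 0-199/200"
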
- */ -function displayHttpDiagnostics(response) { - core_1.info(`##### Begin Diagnostic HTTP information ##### -Status Code: ${response.message.statusCode} -Status Message: ${response.message.statusMessage} -Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} -###### End Diagnostic HTTP information ######`); -} -exports.displayHttpDiagnostics = displayHttpDiagnostics; -function createDirectoriesForArtifact(directories) { - return __awaiter(this, void 0, void 0, function* () { - for (const directory of directories) { - yield fs_1.promises.mkdir(directory, { - recursive: true - }); - } - }); -} -exports.createDirectoriesForArtifact = createDirectoriesForArtifact; -function createEmptyFilesForArtifact(emptyFilesToCreate) { - return __awaiter(this, void 0, void 0, function* () { - for (const filePath of emptyFilesToCreate) { - yield (yield fs_1.promises.open(filePath, 'w')).close(); - } - }); -} -exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact; -function getFileSize(filePath) { - return __awaiter(this, void 0, void 0, function* () { - const stats = yield fs_1.promises.stat(filePath); - core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`); - return stats.size; - }); -} -exports.getFileSize = getFileSize; -function rmFile(filePath) { - return __awaiter(this, void 0, void 0, function* () { - yield fs_1.promises.unlink(filePath); - }); -} -exports.rmFile = rmFile; -function getProperRetention(retentionInput, retentionSetting) { - if (retentionInput < 0) { - throw new Error('Invalid retention, minimum value is 1.'); - } - let retention = retentionInput; - if (retentionSetting) { - const maxRetention = parseInt(retentionSetting); - if (!isNaN(maxRetention) && maxRetention < retention) { - core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`); - retention = maxRetention; - } - } - return retention; -} -exports.getProperRetention = getProperRetention; -function sleep(milliseconds) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise(resolve => setTimeout(resolve, milliseconds)); - }); -} -exports.sleep = sleep; -function digestForStream(stream) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - const crc64 = new crc64_1.default(); - const md5 = crypto_1.default.createHash('md5'); - stream - .on('data', data => { - crc64.update(data); - md5.update(data); - }) - .on('end', () => resolve({ - crc64: crc64.digest('base64'), - md5: md5.digest('base64') - })) - .on('error', reject); - }); - }); -} -exports.digestForStream = digestForStream; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 139: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0; -const fs = __importStar(__webpack_require__(747)); -const zlib = __importStar(__webpack_require__(761)); -const util_1 = __webpack_require__(669); -const stat = util_1.promisify(fs.stat); -/** - * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip - * files then will just waste a lot of time before ultimately being discarded (especially for very large files). - * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is - */ -const gzipExemptFileExtensions = [ - '.gzip', - '.zip', - '.tar.lz', - '.tar.gz', - '.tar.bz2', - '.7z' -]; -/** - * Creates a Gzip compressed file of an original file at the provided temporary filepath location - * @param {string} originalFilePath filepath of whatever will be compressed. 
The original file will be unmodified - * @param {string} tempFilePath the location of where the Gzip file will be created - * @returns the size of gzip file that gets created - */ -function createGZipFileOnDisk(originalFilePath, tempFilePath) { - return __awaiter(this, void 0, void 0, function* () { - for (const gzipExemptExtension of gzipExemptFileExtensions) { - if (originalFilePath.endsWith(gzipExemptExtension)) { - // return a really large number so that the original file gets uploaded - return Number.MAX_SAFE_INTEGER; - } - } - return new Promise((resolve, reject) => { - const inputStream = fs.createReadStream(originalFilePath); - const gzip = zlib.createGzip(); - const outputStream = fs.createWriteStream(tempFilePath); - inputStream.pipe(gzip).pipe(outputStream); - outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () { - // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload - const size = (yield stat(tempFilePath)).size; - resolve(size); - })); - outputStream.on('error', error => { - // eslint-disable-next-line no-console - console.log(error); - reject; - }); - }); - }); -} -exports.createGZipFileOnDisk = createGZipFileOnDisk; -/** - * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O - * @param originalFilePath the path to the original file that is being GZipped - * @returns a buffer with the GZip file - */ -function createGZipFileInBuffer(originalFilePath) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - var e_1, _a; - const inputStream = fs.createReadStream(originalFilePath); - const gzip = zlib.createGzip(); - inputStream.pipe(gzip); - // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 - const chunks = []; - try { - for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { - const chunk = gzip_1_1.value; - chunks.push(chunk); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); - } - finally { if (e_1) throw e_1.error; } - } - resolve(Buffer.concat(chunks)); - })); - }); -} -exports.createGZipFileInBuffer = createGZipFileInBuffer; -//# sourceMappingURL=upload-gzip.js.map - -/***/ }), - -/***/ 166: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const internal_globber_1 = __webpack_require__(941); -/** - * Constructs a globber - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); -} -exports.create = create; -//# sourceMappingURL=glob.js.map - -/***/ }), - -/***/ 171: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var concatMap = __webpack_require__(857); -var balanced = __webpack_require__(507); - -module.exports = expandTop; - -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; - -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); -} - -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} - -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} - - -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; - - var parts = []; - var m = balanced('{', '}', str); - - if (!m) - return str.split(','); - - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); - - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } - - parts.push.apply(parts, p); - - return parts; -} - -function expandTop(str) { - if (!str) - return []; - - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. 
- // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } - - return expand(escapeBraces(str), true).map(unescapeBraces); -} - -function identity(e) { - return e; -} - -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} - -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} - -function expand(str, isTop) { - var expansions = []; - - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; - - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } - - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } - - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. - - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; - - var N; - - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); - - N = []; - - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } - - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } - - return expansions; -} - - - -/***/ }), - -/***/ 175: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = __webpack_require__(357); -const path = __webpack_require__(622); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. 
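
// Illustrative results of the brace expansion implemented by expandTop()/expand()
// above, written against the standalone brace-expansion package (the same code
// vendored here). The strings are made-up examples.
const expand = require('brace-expansion')

console.log(expand('file-{a,b,c}.txt')) // [ 'file-a.txt', 'file-b.txt', 'file-c.txt' ]
console.log(expand('a{1..3}'))          // [ 'a1', 'a2', 'a3' ]  -- numeric sequence
console.log(expand('{01..03}'))         // [ '01', '02', '03' ]  -- zero padding is preserved
console.log(expand('x{{a,b}}y'))        // [ 'x{a}y', 'x{b}y' ]  -- nested set of one
console.log(expand('a{},b}c'))          // [ 'a}c', 'abc' ]      -- the a{},b}c case described above
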
- * - * For example, on Linux/macOS: - * - `/ => /` - * - `/hello => /` - * - * For example, on Windows: - * - `C:\ => C:\` - * - `C:\hello => C:\` - * - `C: => C:` - * - `C:hello => C:` - * - `\ => \` - * - `\hello => \` - * - `\\hello => \\hello` - * - `\\hello\world => \\hello\world` - */ -function dirname(p) { - // Normalize slashes and trim unnecessary trailing slash - p = safeTrimTrailingSeparator(p); - // Windows UNC root, e.g. \\hello or \\hello\world - if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { - return p; - } - // Get dirname - let result = path.dirname(p); - // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ - if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { - result = safeTrimTrailingSeparator(result); - } - return result; -} -exports.dirname = dirname; -/** - * Roots the path if not already rooted. On Windows, relative roots like `\` - * or `C:` are expanded based on the current working directory. - */ -function ensureAbsoluteRoot(root, itemPath) { - assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); - assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); - // Already rooted - if (hasAbsoluteRoot(itemPath)) { - return itemPath; - } - // Windows - if (IS_WINDOWS) { - // Check for itemPath like C: or C:foo - if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { - let cwd = process.cwd(); - assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - // Drive letter matches cwd? Expand to cwd - if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { - // Drive only, e.g. C: - if (itemPath.length === 2) { - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}`; - } - // Drive + path, e.g. C:foo - else { - if (!cwd.endsWith('\\')) { - cwd += '\\'; - } - // Preserve specified drive letter case (upper or lower) - return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; - } - } - // Different drive - else { - return `${itemPath[0]}:\\${itemPath.substr(2)}`; - } - } - // Check for itemPath like \ or \foo - else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { - const cwd = process.cwd(); - assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); - return `${cwd[0]}:\\${itemPath.substr(1)}`; - } - } - assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); - // Otherwise ensure root ends with a separator - if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { - // Intentionally empty - } - else { - // Append separator - root += path.sep; - } - return root + itemPath; -} -exports.ensureAbsoluteRoot = ensureAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\\hello\share` and `C:\hello` (and using alternate separator). - */ -function hasAbsoluteRoot(itemPath) { - assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \\hello\share or C:\hello - return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); - } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasAbsoluteRoot = hasAbsoluteRoot; -/** - * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: - * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). 
- */ -function hasRoot(itemPath) { - assert(itemPath, `isRooted parameter 'itemPath' must not be empty`); - // Normalize separators - itemPath = normalizeSeparators(itemPath); - // Windows - if (IS_WINDOWS) { - // E.g. \ or \hello or \\hello - // E.g. C: or C:\hello - return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); - } - // E.g. /hello - return itemPath.startsWith('/'); -} -exports.hasRoot = hasRoot; -/** - * Removes redundant slashes and converts `/` to `\` on Windows - */ -function normalizeSeparators(p) { - p = p || ''; - // Windows - if (IS_WINDOWS) { - // Convert slashes on Windows - p = p.replace(/\//g, '\\'); - // Remove redundant slashes - const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello - return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC - } - // Remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -exports.normalizeSeparators = normalizeSeparators; -/** - * Normalizes the path separators and trims the trailing separator (when safe). - * For example, `/foo/ => /foo` but `/ => /` - */ -function safeTrimTrailingSeparator(p) { - // Short-circuit if empty - if (!p) { - return ''; - } - // Normalize separators - p = normalizeSeparators(p); - // No trailing slash - if (!p.endsWith(path.sep)) { - return p; - } - // Check '/' on Linux/macOS and '\' on Windows - if (p === path.sep) { - return p; - } - // On Windows check if drive root. E.g. C:\ - if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { - return p; - } - // Otherwise trim trailing slash - return p.substr(0, p.length - 1); -} -exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; -//# sourceMappingURL=internal-path-helper.js.map - -/***/ }), - -/***/ 211: -/***/ (function(module) { - -module.exports = require("https"); - -/***/ }), - -/***/ 230: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -exports.checkBypass = exports.getProxyUrl = void 0; -function getProxyUrl(reqUrl) { - const usingSsl = reqUrl.protocol === 'https:'; - if (checkBypass(reqUrl)) { - return undefined; - } - const proxyVar = (() => { - if (usingSsl) { - return process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - return process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - })(); - if (proxyVar) { - return new URL(proxyVar); - } - else { - return undefined; - } -} -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - const upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (const upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } - } - return false; -} -exports.checkBypass = checkBypass; -//# sourceMappingURL=proxy.js.map - -/***/ }), - -/***/ 242: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); 
-class SearchState { - constructor(path, level) { - this.path = path; - this.level = level; - } -} -exports.SearchState = SearchState; -//# sourceMappingURL=internal-search-state.js.map - -/***/ }), - -/***/ 254: -/***/ (function(module, __unusedexports, __webpack_require__) { - -const assert = __webpack_require__(357) -const path = __webpack_require__(622) -const fs = __webpack_require__(747) -let glob = undefined -try { - glob = __webpack_require__(93) -} catch (_err) { - // treat glob as optional. -} - -const defaultGlobOpts = { - nosort: true, - silent: true -} - -// for EMFILE handling -let timeout = 0 - -const isWindows = (process.platform === "win32") - -const defaults = options => { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 - options.emfileWait = options.emfileWait || 1000 - if (options.glob === false) { - options.disableGlob = true - } - if (options.disableGlob !== true && glob === undefined) { - throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') - } - options.disableGlob = options.disableGlob || false - options.glob = options.glob || defaultGlobOpts -} - -const rimraf = (p, options, cb) => { - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert.equal(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - let busyTries = 0 - let errState = null - let n = 0 - - const next = (er) => { - errState = errState || er - if (--n === 0) - cb(errState) - } - - const afterGlob = (er, results) => { - if (er) - return cb(er) - - n = results.length - if (n === 0) - return cb() - - results.forEach(p => { - const CB = (er) => { - if (er) { - if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && - busyTries < options.maxBusyTries) { - busyTries ++ - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) - } - - // this one won't happen if graceful-fs is used. - if (er.code === "EMFILE" && timeout < options.emfileWait) { - return setTimeout(() => rimraf_(p, options, CB), timeout ++) - } - - // already gone - if (er.code === "ENOENT") er = null - } - - timeout = 0 - next(er) - } - rimraf_(p, options, CB) - }) - } - - if (options.disableGlob || !glob.hasMagic(p)) - return afterGlob(null, [p]) - - options.lstat(p, (er, stat) => { - if (!er) - return afterGlob(null, [p]) - - glob(p, options.glob, afterGlob) - }) - -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. 
-const rimraf_ = (p, options, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === "ENOENT") - return cb(null) - - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === "EPERM" && isWindows) - fixWinEPERM(p, options, er, cb) - - if (st && st.isDirectory()) - return rmdir(p, options, er, cb) - - options.unlink(p, er => { - if (er) { - if (er.code === "ENOENT") - return cb(null) - if (er.code === "EPERM") - return (isWindows) - ? fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - if (er.code === "EISDIR") - return rmdir(p, options, er, cb) - } - return cb(er) - }) - }) -} - -const fixWinEPERM = (p, options, er, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) - cb(er2.code === "ENOENT" ? null : er) - else - options.stat(p, (er3, stats) => { - if (er3) - cb(er3.code === "ENOENT" ? null : er) - else if (stats.isDirectory()) - rmdir(p, options, er, cb) - else - options.unlink(p, cb) - }) - }) -} - -const fixWinEPERMSync = (p, options, er) => { - assert(p) - assert(options) - - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === "ENOENT") - return - else - throw er - } - - let stats - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === "ENOENT") - return - else - throw er - } - - if (stats.isDirectory()) - rmdirSync(p, options, er) - else - options.unlinkSync(p) -} - -const rmdir = (p, options, originalEr, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, er => { - if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) - rmkids(p, options, cb) - else if (er && er.code === "ENOTDIR") - cb(originalEr) - else - cb(er) - }) -} - -const rmkids = (p, options, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, (er, files) => { - if (er) - return cb(er) - let n = files.length - if (n === 0) - return options.rmdir(p, cb) - let errState - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) - return - if (er) - return cb(errState = er) - if (--n === 0) - options.rmdir(p, cb) - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -const rimrafSync = (p, options) => { - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - let results - - if (options.disableGlob || !glob.hasMagic(p)) { - results = [p] - } else { - try { - options.lstatSync(p) - results = [p] - } catch (er) { - results = glob.sync(p, options.glob) - } - } - - if (!results.length) - return - - for (let i = 0; i < results.length; i++) { - const p = results[i] - - let st - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - - // Windows can EPERM on stat. Life is suffering. 
- if (er.code === "EPERM" && isWindows) - fixWinEPERMSync(p, options, er) - } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) - rmdirSync(p, options, null) - else - options.unlinkSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "EPERM") - return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - if (er.code !== "EISDIR") - throw er - - rmdirSync(p, options, er) - } - } -} - -const rmdirSync = (p, options, originalEr) => { - assert(p) - assert(options) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "ENOTDIR") - throw originalEr - if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") - rmkidsSync(p, options) - } -} - -const rmkidsSync = (p, options) => { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const retries = isWindows ? 100 : 1 - let i = 0 - do { - let threw = true - try { - const ret = options.rmdirSync(p, options) - threw = false - return ret - } finally { - if (++i < retries && threw) - continue - } - } while (true) -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - -/***/ }), - -/***/ 257: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = __webpack_require__(913); - - -/***/ }), - -/***/ 313: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var pathModule = __webpack_require__(622); -var isWindows = process.platform === 'win32'; -var fs = __webpack_require__(747); - -// JavaScript implementation of realpath, ported from node pre-v6 - -var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); - -function rethrow() { - // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and - // is fairly slow to generate. 
- var callback; - if (DEBUG) { - var backtrace = new Error; - callback = debugCallback; - } else - callback = missingCallback; - - return callback; - - function debugCallback(err) { - if (err) { - backtrace.message = err.message; - err = backtrace; - missingCallback(err); - } - } - - function missingCallback(err) { - if (err) { - if (process.throwDeprecation) - throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs - else if (!process.noDeprecation) { - var msg = 'fs: missing callback ' + (err.stack || err.message); - if (process.traceDeprecation) - console.trace(msg); - else - console.error(msg); - } - } - } -} - -function maybeCallback(cb) { - return typeof cb === 'function' ? cb : rethrow(); -} - -var normalize = pathModule.normalize; - -// Regexp that finds the next partion of a (partial) path -// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] -if (isWindows) { - var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; -} else { - var nextPartRe = /(.*?)(?:[\/]+|$)/g; -} - -// Regex to find the device root, including trailing slash. E.g. 'c:\\'. -if (isWindows) { - var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; -} else { - var splitRootRe = /^[\/]*/; -} - -exports.realpathSync = function realpathSync(p, cache) { - // make p is absolute - p = pathModule.resolve(p); - - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return cache[p]; - } - - var original = p, - seenLinks = {}, - knownHard = {}; - - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; - - start(); - - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; - - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstatSync(base); - knownHard[base] = true; - } - } - - // walk down the path, swapping out linked pathparts for their real - // values - // NB: p.length changes. - while (pos < p.length) { - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - continue; - } - - var resolvedLink; - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // some known symbolic link. no need to stat again. - resolvedLink = cache[base]; - } else { - var stat = fs.lstatSync(base); - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - continue; - } - - // read the link if it wasn't read before - // dev/ino always return 0 on windows, so skip the check. - var linkTarget = null; - if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - linkTarget = seenLinks[id]; - } - } - if (linkTarget === null) { - fs.statSync(base); - linkTarget = fs.readlinkSync(base); - } - resolvedLink = pathModule.resolve(previous, linkTarget); - // track this, if given a cache. 
- if (cache) cache[base] = resolvedLink; - if (!isWindows) seenLinks[id] = linkTarget; - } - - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } - - if (cache) cache[original] = p; - - return p; -}; - - -exports.realpath = function realpath(p, cache, cb) { - if (typeof cb !== 'function') { - cb = maybeCallback(cache); - cache = null; - } - - // make p is absolute - p = pathModule.resolve(p); - - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return process.nextTick(cb.bind(null, null, cache[p])); - } - - var original = p, - seenLinks = {}, - knownHard = {}; - - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; - - start(); - - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; - - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstat(base, function(err) { - if (err) return cb(err); - knownHard[base] = true; - LOOP(); - }); - } else { - process.nextTick(LOOP); - } - } - - // walk down the path, swapping out linked pathparts for their real - // values - function LOOP() { - // stop if scanned past end of path - if (pos >= p.length) { - if (cache) cache[original] = p; - return cb(null, p); - } - - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - return process.nextTick(LOOP); - } - - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // known symbolic link. no need to stat again. - return gotResolvedLink(cache[base]); - } - - return fs.lstat(base, gotStat); - } - - function gotStat(err, stat) { - if (err) return cb(err); - - // if not a symlink, skip to the next path part - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - return process.nextTick(LOOP); - } - - // stat & read the link if not read before - // call gotTarget as soon as the link target is known - // dev/ino always return 0 on windows, so skip the check. 
- if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - return gotTarget(null, seenLinks[id], base); - } - } - fs.stat(base, function(err) { - if (err) return cb(err); - - fs.readlink(base, function(err, target) { - if (!isWindows) seenLinks[id] = target; - gotTarget(err, target); - }); - }); - } - - function gotTarget(err, target, base) { - if (err) return cb(err); - - var resolvedLink = pathModule.resolve(previous, target); - if (cache) cache[base] = resolvedLink; - gotResolvedLink(resolvedLink); - } - - function gotResolvedLink(resolvedLink) { - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } -}; - - -/***/ }), - -/***/ 318: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = __webpack_require__(357); -const path = __webpack_require__(622); -const pathHelper = __webpack_require__(175); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Helper class for parsing paths into segments - */ -class Path { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - // String - if (typeof itemPath === 'string') { - assert(itemPath, `Parameter 'itemPath' must not be empty`); - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // Not rooted - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path.sep); - } - // Rooted - else { - // Add all segments, while not at the root - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - // Add the segment - const basename = path.basename(remaining); - this.segments.unshift(basename); - // Truncate the last segment - remaining = dir; - dir = pathHelper.dirname(remaining); - } - // Remainder is the root - this.segments.unshift(remaining); - } - } - // Array - else { - // Must not be empty - assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - // Each segment - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - // Must not be empty - assert(segment, `Parameter 'itemPath' must not contain any empty segments`); - // Normalize slashes - segment = pathHelper.normalizeSeparators(itemPath[i]); - // Root segment - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - this.segments.push(segment); - } - // All other segments - else { - // Must not contain slash - assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); - this.segments.push(segment); - } - } - } - } - /** - * Converts the path to it's string representation - */ - toString() { - // First segment - let result = this.segments[0]; - // All others - let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; - } - else { - result += path.sep; - } - result += this.segments[i]; - } - return result; - } -} -exports.Path = Path; -//# sourceMappingURL=internal-path.js.map - -/***/ }), - -/***/ 335: -/***/ (function(__unusedmodule, exports) { - -"use strict"; +"use strict"; /** * 
CRC64: cyclic redundancy check, 64-bits @@ -4576,126 +1784,2653 @@ class CRC64 { constructor() { this._crc = BigInt(0); } - update(data) { - const buffer = typeof data === 'string' ? Buffer.from(data) : data; - let crc = CRC64.flip64Bits(this._crc); - for (const dataByte of buffer) { - const crcByte = Number(crc & BigInt(0xff)); - crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8)); - } - this._crc = CRC64.flip64Bits(crc); + update(data) { + const buffer = typeof data === 'string' ? Buffer.from(data) : data; + let crc = CRC64.flip64Bits(this._crc); + for (const dataByte of buffer) { + const crcByte = Number(crc & BigInt(0xff)); + crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8)); + } + this._crc = CRC64.flip64Bits(crc); + } + digest(encoding) { + switch (encoding) { + case 'hex': + return this._crc.toString(16).toUpperCase(); + case 'base64': + return this.toBuffer().toString('base64'); + default: + return this.toBuffer(); + } + } + toBuffer() { + return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff)))); + } + static flip64Bits(n) { + return (BigInt(1) << BigInt(64)) - BigInt(1) - n; + } +} +exports.default = CRC64; +//# sourceMappingURL=crc64.js.map + +/***/ }), + +/***/ 117: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +var pathModule = __webpack_require__(622); +var isWindows = process.platform === 'win32'; +var fs = __webpack_require__(747); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. + var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } + } + + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? 
Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } +} + +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } + + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } + + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. 
+ if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } + + if (cache) cache[original] = p; + + return p; +}; + + +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. + return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. 
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; + + +/***/ }), + +/***/ 141: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +var net = __webpack_require__(631); +var tls = __webpack_require__(16); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var events = __webpack_require__(614); +var assert = __webpack_require__(357); +var util = __webpack_require__(669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. 
+ this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 150: +/***/ (function(module, __unusedexports, __webpack_require__) { + +/*! + * Tmp + * + * Copyright (c) 2011-2017 KARASZI Istvan + * + * MIT Licensed + */ + +/* + * Module dependencies. + */ +const fs = __webpack_require__(747); +const os = __webpack_require__(87); +const path = __webpack_require__(622); +const crypto = __webpack_require__(417); +const _c = { fs: fs.constants, os: os.constants }; +const rimraf = __webpack_require__(13); + +/* + * The working inner variables. + */ +const + // the random characters to choose from + RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', + + TEMPLATE_PATTERN = /XXXXXX/, + + DEFAULT_TRIES = 3, + + CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), + + // constants are off on the windows platform and will not match the actual errno codes + IS_WIN32 = os.platform() === 'win32', + EBADF = _c.EBADF || _c.os.errno.EBADF, + ENOENT = _c.ENOENT || _c.os.errno.ENOENT, + + DIR_MODE = 0o700 /* 448 */, + FILE_MODE = 0o600 /* 384 */, + + EXIT = 'exit', + + // this will hold the objects need to be removed on exit + _removeObjects = [], + + // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback + FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), + FN_RIMRAF_SYNC = rimraf.sync; + +let + _gracefulCleanup = false; + +/** + * Gets a temporary file name. 
+ * + * @param {(Options|tmpNameCallback)} options options or callback + * @param {?tmpNameCallback} callback the callback function + */ +function tmpName(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + try { + _assertAndSanitizeOptions(opts); + } catch (err) { + return cb(err); + } + + let tries = opts.tries; + (function _getUniqueName() { + try { + const name = _generateTmpName(opts); + + // check whether the path exists then retry if needed + fs.stat(name, function (err) { + /* istanbul ignore else */ + if (!err) { + /* istanbul ignore else */ + if (tries-- > 0) return _getUniqueName(); + + return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); + } + + cb(null, name); + }); + } catch (err) { + cb(err); + } + }()); +} + +/** + * Synchronous version of tmpName. + * + * @param {Object} options + * @returns {string} the generated random name + * @throws {Error} if the options are invalid or could not generate a filename + */ +function tmpNameSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + _assertAndSanitizeOptions(opts); + + let tries = opts.tries; + do { + const name = _generateTmpName(opts); + try { + fs.statSync(name); + } catch (e) { + return name; + } + } while (tries-- > 0); + + throw new Error('Could not get a unique tmp filename, max tries reached'); +} + +/** + * Creates and opens a temporary file. + * + * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined + * @param {?fileCallback} callback + */ +function file(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create and open the file + fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { + /* istanbu ignore else */ + if (err) return cb(err); + + if (opts.discardDescriptor) { + return fs.close(fd, function _discardCallback(possibleErr) { + // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only + return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); + }); + } else { + // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care + // about the descriptor + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, false)); + } + }); + }); +} + +/** + * Synchronous version of file. + * + * @param {Options} options + * @returns {FileSyncObject} object consists of name, fd and removeCallback + * @throws {Error} if cannot create a file + */ +function fileSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + const name = tmpNameSync(opts); + var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + /* istanbul ignore else */ + if (opts.discardDescriptor) { + fs.closeSync(fd); + fd = undefined; + } + + return { + name: name, + fd: fd, + removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) + }; +} + +/** + * Creates a temporary directory. 
+ * + * @param {(Options|dirCallback)} options the options or the callback function + * @param {?dirCallback} callback + */ +function dir(options, callback) { + const + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create the directory + fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { + /* istanbul ignore else */ + if (err) return cb(err); + + cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); + }); + }); +} + +/** + * Synchronous version of dir. + * + * @param {Options} options + * @returns {DirSyncObject} object consists of name and removeCallback + * @throws {Error} if it cannot create a directory + */ +function dirSync(options) { + const + args = _parseArguments(options), + opts = args[0]; + + const name = tmpNameSync(opts); + fs.mkdirSync(name, opts.mode || DIR_MODE); + + return { + name: name, + removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) + }; +} + +/** + * Removes files asynchronously. + * + * @param {Object} fdPath + * @param {Function} next + * @private + */ +function _removeFileAsync(fdPath, next) { + const _handler = function (err) { + if (err && !_isENOENT(err)) { + // reraise any unanticipated error + return next(err); + } + next(); + }; + + if (0 <= fdPath[0]) + fs.close(fdPath[0], function () { + fs.unlink(fdPath[1], _handler); + }); + else fs.unlink(fdPath[1], _handler); +} + +/** + * Removes files synchronously. + * + * @param {Object} fdPath + * @private + */ +function _removeFileSync(fdPath) { + let rethrownException = null; + try { + if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); + } catch (e) { + // reraise any unanticipated error + if (!_isEBADF(e) && !_isENOENT(e)) throw e; + } finally { + try { + fs.unlinkSync(fdPath[1]); + } + catch (e) { + // reraise any unanticipated error + if (!_isENOENT(e)) rethrownException = e; + } + } + if (rethrownException !== null) { + throw rethrownException; + } +} + +/** + * Prepares the callback for removal of the temporary file. + * + * Returns either a sync callback or a async callback depending on whether + * fileSync or file was called, which is expressed by the sync parameter. + * + * @param {string} name the path of the file + * @param {number} fd file descriptor + * @param {Object} opts + * @param {boolean} sync + * @returns {fileCallback | fileCallbackSync} + * @private + */ +function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { + const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); + const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); + + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return sync ? removeCallbackSync : removeCallback; +} + +/** + * Prepares the callback for removal of the temporary directory. + * + * Returns either a sync callback or a async callback depending on whether + * tmpFileSync or tmpFile was called, which is expressed by the sync parameter. + * + * @param {string} name + * @param {Object} opts + * @param {boolean} sync + * @returns {Function} the callback + * @private + */ +function _prepareTmpDirRemoveCallback(name, opts, sync) { + const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs); + const removeFunctionSync = opts.unsafeCleanup ? 
FN_RIMRAF_SYNC : FN_RMDIR_SYNC; + const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); + const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return sync ? removeCallbackSync : removeCallback; +} + +/** + * Creates a guarded function wrapping the removeFunction call. + * + * The cleanup callback is save to be called multiple times. + * Subsequent invocations will be ignored. + * + * @param {Function} removeFunction + * @param {string} fileOrDirName + * @param {boolean} sync + * @param {cleanupCallbackSync?} cleanupCallbackSync + * @returns {cleanupCallback | cleanupCallbackSync} + * @private + */ +function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { + let called = false; + + // if sync is true, the next parameter will be ignored + return function _cleanupCallback(next) { + + /* istanbul ignore else */ + if (!called) { + // remove cleanupCallback from cache + const toRemove = cleanupCallbackSync || _cleanupCallback; + const index = _removeObjects.indexOf(toRemove); + /* istanbul ignore else */ + if (index >= 0) _removeObjects.splice(index, 1); + + called = true; + if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { + return removeFunction(fileOrDirName); + } else { + return removeFunction(fileOrDirName, next || function() {}); + } + } + }; +} + +/** + * The garbage collector. + * + * @private + */ +function _garbageCollector() { + /* istanbul ignore else */ + if (!_gracefulCleanup) return; + + // the function being called removes itself from _removeObjects, + // loop until _removeObjects is empty + while (_removeObjects.length) { + try { + _removeObjects[0](); + } catch (e) { + // already removed? + } + } +} + +/** + * Random name generator based on crypto. + * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript + * + * @param {number} howMany + * @returns {string} the generated random name + * @private + */ +function _randomChars(howMany) { + let + value = [], + rnd = null; + + // make sure that we do not fail because we ran out of entropy + try { + rnd = crypto.randomBytes(howMany); + } catch (e) { + rnd = crypto.pseudoRandomBytes(howMany); + } + + for (var i = 0; i < howMany; i++) { + value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); + } + + return value.join(''); +} + +/** + * Helper which determines whether a string s is blank, that is undefined, or empty or null. + * + * @private + * @param {string} s + * @returns {Boolean} true whether the string s is blank, false otherwise + */ +function _isBlank(s) { + return s === null || _isUndefined(s) || !s.trim(); +} + +/** + * Checks whether the `obj` parameter is defined or not. + * + * @param {Object} obj + * @returns {boolean} true if the object is undefined + * @private + */ +function _isUndefined(obj) { + return typeof obj === 'undefined'; +} + +/** + * Parses the function arguments. + * + * This function helps to have optional arguments. 
+ * + * @param {(Options|null|undefined|Function)} options + * @param {?Function} callback + * @returns {Array} parsed arguments + * @private + */ +function _parseArguments(options, callback) { + /* istanbul ignore else */ + if (typeof options === 'function') { + return [{}, options]; + } + + /* istanbul ignore else */ + if (_isUndefined(options)) { + return [{}, callback]; + } + + // copy options so we do not leak the changes we make internally + const actualOptions = {}; + for (const key of Object.getOwnPropertyNames(options)) { + actualOptions[key] = options[key]; + } + + return [actualOptions, callback]; +} + +/** + * Generates a new temporary name. + * + * @param {Object} opts + * @returns {string} the new random name according to opts + * @private + */ +function _generateTmpName(opts) { + + const tmpDir = opts.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(opts.name)) + return path.join(tmpDir, opts.dir, opts.name); + + /* istanbul ignore else */ + if (!_isUndefined(opts.template)) + return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + + // prefix and postfix + const name = [ + opts.prefix ? opts.prefix : 'tmp', + '-', + process.pid, + '-', + _randomChars(12), + opts.postfix ? '-' + opts.postfix : '' + ].join(''); + + return path.join(tmpDir, opts.dir, name); +} + +/** + * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing + * options. + * + * @param {Options} options + * @private + */ +function _assertAndSanitizeOptions(options) { + + options.tmpdir = _getTmpDir(options); + + const tmpDir = options.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(options.name)) + _assertIsRelative(options.name, 'name', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.dir)) + _assertIsRelative(options.dir, 'dir', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.template)) { + _assertIsRelative(options.template, 'template', tmpDir); + if (!options.template.match(TEMPLATE_PATTERN)) + throw new Error(`Invalid template, found "${options.template}".`); + } + /* istanbul ignore else */ + if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) + throw new Error(`Invalid tries, found "${options.tries}".`); + + // if a name was specified we will try once + options.tries = _isUndefined(options.name) ? options.tries || DEFAULT_TRIES : 1; + options.keep = !!options.keep; + options.detachDescriptor = !!options.detachDescriptor; + options.discardDescriptor = !!options.discardDescriptor; + options.unsafeCleanup = !!options.unsafeCleanup; + + // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); + options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); + // sanitize further if template is relative to options.dir + options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); + + // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); + options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; + options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; +} + +/** + * Resolve the specified path name in respect to tmpDir. 
+ * + * The specified name might include relative path components, e.g. ../ + * so we need to resolve in order to be sure that is is located inside tmpDir + * + * @param name + * @param tmpDir + * @returns {string} + * @private + */ +function _resolvePath(name, tmpDir) { + const sanitizedName = _sanitizeName(name); + if (sanitizedName.startsWith(tmpDir)) { + return path.resolve(sanitizedName); + } else { + return path.resolve(path.join(tmpDir, sanitizedName)); + } +} + +/** + * Sanitize the specified path name by removing all quote characters. + * + * @param name + * @returns {string} + * @private + */ +function _sanitizeName(name) { + if (_isBlank(name)) { + return name; + } + return name.replace(/["']/g, ''); +} + +/** + * Asserts whether specified name is relative to the specified tmpDir. + * + * @param {string} name + * @param {string} option + * @param {string} tmpDir + * @throws {Error} + * @private + */ +function _assertIsRelative(name, option, tmpDir) { + if (option === 'name') { + // assert that name is not absolute and does not contain a path + if (path.isAbsolute(name)) + throw new Error(`${option} option must not contain an absolute path, found "${name}".`); + // must not fail on valid . or .. or similar such constructs + let basename = path.basename(name); + if (basename === '..' || basename === '.' || basename !== name) + throw new Error(`${option} option must not contain a path, found "${name}".`); + } + else { // if (option === 'dir' || option === 'template') { + // assert that dir or template are relative to tmpDir + if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { + throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); + } + let resolvedPath = _resolvePath(name, tmpDir); + if (!resolvedPath.startsWith(tmpDir)) + throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); + } +} + +/** + * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isEBADF(error) { + return _isExpectedError(error, -EBADF, 'EBADF'); +} + +/** + * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isENOENT(error) { + return _isExpectedError(error, -ENOENT, 'ENOENT'); +} + +/** + * Helper to determine whether the expected error code matches the actual code and errno, + * which will differ between the supported node versions. + * + * - Node >= 7.0: + * error.code {string} + * error.errno {number} any numerical value will be negated + * + * CAVEAT + * + * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT + * is no different here. + * + * @param {SystemError} error + * @param {number} errno + * @param {string} code + * @private + */ +function _isExpectedError(error, errno, code) { + return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; +} + +/** + * Sets the graceful cleanup. + * + * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the + * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary + * object removals. + */ +function setGracefulCleanup() { + _gracefulCleanup = true; +} + +/** + * Returns the currently configured tmp dir from os.tmpdir(). 
+ * + * @private + * @param {?Options} options + * @returns {string} the currently configured tmp dir + */ +function _getTmpDir(options) { + return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); +} + +// Install process exit listener +process.addListener(EXIT, _garbageCollector); + +/** + * Configuration options. + * + * @typedef {Object} Options + * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected + * @property {?number} tries the number of tries before give up the name generation + * @property (?int) mode the access mode, defaults are 0o700 for directories and 0o600 for files + * @property {?string} template the "mkstemp" like filename template + * @property {?string} name fixed name relative to tmpdir or the specified dir option + * @property {?string} dir tmp directory relative to the root tmp directory in use + * @property {?string} prefix prefix for the generated name + * @property {?string} postfix postfix for the generated name + * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir + * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty + * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection + * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection + */ + +/** + * @typedef {Object} FileSyncObject + * @property {string} name the name of the file + * @property {string} fd the file descriptor or -1 if the fd has been discarded + * @property {fileCallback} removeCallback the callback function to remove the file + */ + +/** + * @typedef {Object} DirSyncObject + * @property {string} name the name of the directory + * @property {fileCallback} removeCallback the callback function to remove the directory + */ + +/** + * @callback tmpNameCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + */ + +/** + * @callback fileCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd had been discarded + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * @callback fileCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd had been discarded + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + +/** + * @callback dirCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * @callback dirCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + +/** + * Removes the temporary created file or directory. + * + * @callback cleanupCallback + * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed + */ + +/** + * Removes the temporary created file or directory. 
+ * + * @callback cleanupCallbackSync + */ + +/** + * Callback function for function composition. + * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} + * + * @callback simpleCallback + */ + +// exporting all the needed methods + +// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will +// allow users to reconfigure the temporary directory +Object.defineProperty(module.exports, 'tmpdir', { + enumerable: true, + configurable: false, + get: function () { + return _getTmpDir(); + } +}); + +module.exports.dir = dir; +module.exports.dirSync = dirSync; + +module.exports.file = file; +module.exports.fileSync = fileSync; + +module.exports.tmpName = tmpName; +module.exports.tmpNameSync = tmpNameSync; + +module.exports.setGracefulCleanup = setGracefulCleanup; + + +/***/ }), + +/***/ 176: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +exports.StatusReporter = void 0; +const core_1 = __webpack_require__(470); +/** + * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded + * + * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable + * The total status of the upload/download gets displayed according to this value + * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function + */ +class StatusReporter { + constructor(displayFrequencyInMilliseconds) { + this.totalNumberOfFilesToProcess = 0; + this.processedCount = 0; + this.largeFiles = new Map(); + this.totalFileStatus = undefined; + this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; + } + setTotalNumberOfFilesToProcess(fileTotal) { + this.totalNumberOfFilesToProcess = fileTotal; + this.processedCount = 0; + } + start() { + // displays information about the total upload/download status + this.totalFileStatus = setInterval(() => { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); + core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); + }, this.displayFrequencyInMilliseconds); + } + // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload + updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize); + core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); + } + stop() { + if (this.totalFileStatus) { + clearInterval(this.totalFileStatus); + } + } + incrementProcessedCount() { + this.processedCount++; + } + formatPercentage(numerator, denominator) { + // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed + return ((numerator / denominator) * 100).toFixed(4).toString(); + } +} +exports.StatusReporter = StatusReporter; +//# sourceMappingURL=status-reporter.js.map + +/***/ }), + +/***/ 177: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { 
value: true }); +exports.checkBypass = exports.getProxyUrl = void 0; +function getProxyUrl(reqUrl) { + const usingSsl = reqUrl.protocol === 'https:'; + if (checkBypass(reqUrl)) { + return undefined; + } + const proxyVar = (() => { + if (usingSsl) { + return process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + return process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + })(); + if (proxyVar) { + return new URL(proxyVar); + } + else { + return undefined; + } +} +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; + } + const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; + } + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); + } + else if (reqUrl.protocol === 'http:') { + reqPort = 80; + } + else if (reqUrl.protocol === 'https:') { + reqPort = 443; + } + // Format the request hostname and hostname with port + const upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (const upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; + } + } + return false; +} +exports.checkBypass = checkBypass; +//# sourceMappingURL=proxy.js.map + +/***/ }), + +/***/ 211: +/***/ (function(module) { + +module.exports = require("https"); + +/***/ }), + +/***/ 214: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const artifact_client_1 = __webpack_require__(359); +/** + * Constructs an ArtifactClient + */ +function create() { + return artifact_client_1.DefaultArtifactClient.create(); +} +exports.create = create; +//# sourceMappingURL=artifact-client.js.map + +/***/ }), + +/***/ 245: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = globSync +globSync.GlobSync = GlobSync + +var fs = __webpack_require__(747) +var rp = __webpack_require__(302) +var minimatch = __webpack_require__(93) +var Minimatch = minimatch.Minimatch +var Glob = __webpack_require__(402).Glob +var util = __webpack_require__(669) +var path = __webpack_require__(622) +var assert = __webpack_require__(357) +var isAbsolute = __webpack_require__(681) +var common = __webpack_require__(856) +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for 
(var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. 
+ + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. 
+ this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + + +/***/ }), + +/***/ 281: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const internal_globber_1 = __webpack_require__(297); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map + +/***/ }), + +/***/ 297: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? 
(this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __webpack_require__(470); +const fs = __webpack_require__(747); +const globOptionsHelper = __webpack_require__(601); +const path = __webpack_require__(622); +const patternHelper = __webpack_require__(597); +const internal_match_kind_1 = __webpack_require__(327); +const internal_pattern_1 = __webpack_require__(923); +const internal_search_state_1 = __webpack_require__(728); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); } - digest(encoding) { - switch (encoding) { - case 'hex': - return this._crc.toString(16).toUpperCase(); - case 'base64': - return this.toBuffer().toString('base64'); - default: - return this.toBuffer(); - } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); } - toBuffer() { - return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff)))); + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). 
+ yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? + const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); } - static flip64Bits(n) { - return (BigInt(1) << BigInt(64)) - BigInt(1) - n; + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map + +/***/ }), + +/***/ 302: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = __webpack_require__(747) +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = __webpack_require__(117) + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er } + } } -exports.default = CRC64; -//# sourceMappingURL=crc64.js.map -/***/ }), +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} -/***/ 357: -/***/ (function(module) { +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} -module.exports = require("assert"); /***/ }), -/***/ 365: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 306: +/***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; +var concatMap = __webpack_require__(896); +var balanced = __webpack_require__(621); -Object.defineProperty(exports, "__esModule", { value: true }); -const pathHelper = __webpack_require__(175); -const internal_match_kind_1 = __webpack_require__(432); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Given an array of patterns, returns an array of paths to search. - * Duplicates and paths under other included paths are filtered out. - */ -function getSearchPaths(patterns) { - // Ignore negate patterns - patterns = patterns.filter(x => !x.negate); - // Create a map of all search paths - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS - ? 
pattern.searchPath.toUpperCase() - : pattern.searchPath; - searchPathMap[key] = 'candidate'; +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); } - const result = []; - for (const pattern of patterns) { - // Check if already included - const key = IS_WINDOWS - ? 
pattern.searchPath.toUpperCase() - : pattern.searchPath; - if (searchPathMap[key] === 'included') { - continue; - } - // Check for an ancestor search path - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; - } - tempKey = parent; - parent = pathHelper.dirname(tempKey); - } - // Include the search pattern in the result - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = 'included'; + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); } - return result; + } + + return expansions; } -exports.getSearchPaths = getSearchPaths; -/** - * Matches the patterns against the path - */ -function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); - } - else { - result |= pattern.match(itemPath); + + + +/***/ }), + +/***/ 315: +/***/ (function(module) { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true } + }) } - return result; + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } } -exports.match = match; + + +/***/ }), + +/***/ 327: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); /** - * Checks whether to descend further into the directory + * Indicates whether a pattern matches a 
path */ -function partialMatch(patterns, itemPath) { - return patterns.some(x => !x.negate && x.partialMatch(itemPath)); -} -exports.partialMatch = partialMatch; -//# sourceMappingURL=internal-pattern-helper.js.map +var MatchKind; +(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map /***/ }), -/***/ 390: +/***/ 357: +/***/ (function(module) { + +module.exports = require("assert"); + +/***/ }), + +/***/ 359: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -4730,14 +4465,14 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", { value: true }); exports.DefaultArtifactClient = void 0; -const core = __importStar(__webpack_require__(676)); -const upload_specification_1 = __webpack_require__(30); -const upload_http_client_1 = __webpack_require__(800); -const utils_1 = __webpack_require__(128); -const path_and_artifact_name_validation_1 = __webpack_require__(547); -const download_http_client_1 = __webpack_require__(20); -const download_specification_1 = __webpack_require__(426); -const config_variables_1 = __webpack_require__(750); +const core = __importStar(__webpack_require__(470)); +const upload_specification_1 = __webpack_require__(590); +const upload_http_client_1 = __webpack_require__(608); +const utils_1 = __webpack_require__(870); +const path_and_artifact_name_validation_1 = __webpack_require__(553); +const download_http_client_1 = __webpack_require__(855); +const download_specification_1 = __webpack_require__(532); +const config_variables_1 = __webpack_require__(401); const path_1 = __webpack_require__(622); class DefaultArtifactClient { /** @@ -4862,684 +4597,1094 @@ Note: The size of downloaded zips can differ significantly from the reported siz core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { - yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); - yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); - yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + response.push({ + artifactName: currentArtifactToDownload.name, + downloadPath: downloadSpecification.rootDownloadLocation + }); + } + return response; + }); + } +} +exports.DefaultArtifactClient = DefaultArtifactClient; +//# sourceMappingURL=artifact-client.js.map + +/***/ }), + +/***/ 383: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const assert = __webpack_require__(357); +const path = __webpack_require__(622); +const pathHelper = __webpack_require__(972); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path 
+ * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map + +/***/ }), + +/***/ 385: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const artifact_1 = __webpack_require__(214); +const search_1 = __webpack_require__(575); +const input_helper_1 = __webpack_require__(583); +const constants_1 = __webpack_require__(694); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const inputs = input_helper_1.getInputs(); + const searchResult = yield search_1.findFilesToUpload(inputs.searchPath); + if (searchResult.filesToUpload.length === 0) { + // No files were found, different use cases warrant different types of behavior if nothing is found + switch (inputs.ifNoFilesFound) { + case constants_1.NoFileOptions.warn: { + core.warning(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); + break; + } + case constants_1.NoFileOptions.error: { + core.setFailed(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); + break; + } + case constants_1.NoFileOptions.ignore: { + core.info(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); + break; + } + } + } + else { + const s = searchResult.filesToUpload.length === 1 ? '' : 's'; + core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`); + core.debug(`Root artifact directory is ${searchResult.rootDirectory}`); + if (searchResult.filesToUpload.length > 10000) { + core.warning(`There are over 10,000 files in this artifact, consider creating an archive before upload to improve the upload performance.`); + } + const artifactClient = artifact_1.create(); + const options = { + continueOnError: false + }; + if (inputs.retentionDays) { + options.retentionDays = inputs.retentionDays; + } + const uploadResponse = yield artifactClient.uploadArtifact(inputs.artifactName, searchResult.filesToUpload, searchResult.rootDirectory, options); + if (uploadResponse.failedItems.length > 0) { + core.setFailed(`An error was encountered when uploading ${uploadResponse.artifactName}. 
There were ${uploadResponse.failedItems.length} items that failed to upload.`); + } + else { + core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`); } - response.push({ - artifactName: currentArtifactToDownload.name, - downloadPath: downloadSpecification.rootDownloadLocation - }); } - return response; - }); - } + } + catch (err) { + core.setFailed(err.message); + } + }); } -exports.DefaultArtifactClient = DefaultArtifactClient; -//# sourceMappingURL=artifact-client.js.map +run(); + /***/ }), -/***/ 409: -/***/ (function(module) { +/***/ 401: +/***/ (function(__unusedmodule, exports) { "use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0; +// The number of concurrent uploads that happens at the same time +function getUploadFileConcurrency() { + return 2; +} +exports.getUploadFileConcurrency = getUploadFileConcurrency; +// When uploading large files that can't be uploaded with a single http call, this controls +// the chunk size that is used during upload +function getUploadChunkSize() { + return 8 * 1024 * 1024; // 8 MB Chunks +} +exports.getUploadChunkSize = getUploadChunkSize; +// The maximum number of retries that can be attempted before an upload or download fails +function getRetryLimit() { + return 5; +} +exports.getRetryLimit = getRetryLimit; +// With exponential backoff, the larger the retry count, the larger the wait time before another attempt +// The retry multiplier controls by how much the backOff time increases depending on the number of retries +function getRetryMultiplier() { + return 1.5; +} +exports.getRetryMultiplier = getRetryMultiplier; +// The initial wait time if an upload or download fails and a retry is being attempted for the first time +function getInitialRetryIntervalInMilliseconds() { + return 3000; +} +exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds; +// The number of concurrent downloads that happens at the same time +function getDownloadFileConcurrency() { + return 2; +} +exports.getDownloadFileConcurrency = getDownloadFileConcurrency; +function getRuntimeToken() { + const token = process.env['ACTIONS_RUNTIME_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); + } + return token; +} +exports.getRuntimeToken = getRuntimeToken; +function getRuntimeUrl() { + const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); + } + return runtimeUrl; +} +exports.getRuntimeUrl = getRuntimeUrl; +function getWorkFlowRunId() { + const workFlowRunId = process.env['GITHUB_RUN_ID']; + if (!workFlowRunId) { + throw new Error('Unable to get GITHUB_RUN_ID env variable'); + } + return workFlowRunId; +} +exports.getWorkFlowRunId = getWorkFlowRunId; +function getWorkSpaceDirectory() { + const workspaceDirectory = process.env['GITHUB_WORKSPACE']; + if (!workspaceDirectory) { + throw new Error('Unable to get GITHUB_WORKSPACE env variable'); + } + return workspaceDirectory; +} +exports.getWorkSpaceDirectory = getWorkSpaceDirectory; +function getRetentionDays() { + return process.env['GITHUB_RETENTION_DAYS']; +} 
+exports.getRetentionDays = getRetentionDays; +//# sourceMappingURL=config-variables.js.map + +/***/ }), -function posix(path) { - return path.charAt(0) === '/'; +/***/ 402: +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var fs = __webpack_require__(747) +var rp = __webpack_require__(302) +var minimatch = __webpack_require__(93) +var Minimatch = minimatch.Minimatch +var inherits = __webpack_require__(689) +var EE = __webpack_require__(614).EventEmitter +var path = __webpack_require__(622) +var assert = __webpack_require__(357) +var isAbsolute = __webpack_require__(681) +var globSync = __webpack_require__(245) +var common = __webpack_require__(856) +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = __webpack_require__(674) +var util = __webpack_require__(669) +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = __webpack_require__(49) + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) } -function win32(path) { - // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 - var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - var result = splitDeviceRe.exec(path); - var device = result[1] || ''; - var isUnc = Boolean(device && device.charAt(1) !== ':'); +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync - // UNC paths are always absolute - return Boolean(result[2] || isUnc); +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin } 
-module.exports = process.platform === 'win32' ? win32 : posix; -module.exports.posix = posix; -module.exports.win32 = win32; +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true + var g = new Glob(pattern, options) + var set = g.minimatch.set -/***/ }), + if (!pattern) + return false -/***/ 413: -/***/ (function(module) { + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. + this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } -module.exports = require("stream"); + var self = this + this._processing = 0 -/***/ }), + this._emitQueue = [] + this._processQueue = [] + this.paused = false -/***/ 417: -/***/ (function(module) { + if (this.noprocess) + return this -module.exports = require("crypto"); + if (n === 0) + return done() -/***/ }), + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false -/***/ 424: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} -"use strict"; +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return -// For internal use, subject to change. 
-var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__webpack_require__(747)); -const os = __importStar(__webpack_require__(87)); -const utils_1 = __webpack_require__(615); -function issueCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); - } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) } -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map -/***/ }), +Glob.prototype._realpath = function () { + if (this._didRealpath) + return -/***/ 426: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + this._didRealpath = true -"use strict"; + var n = this.matches.length + if (n === 0) + return this._finish() -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getDownloadSpecification = void 0; -const path = __importStar(__webpack_require__(622)); -/** - * Creates a specification for a set of files that will be downloaded - * @param artifactName the name of the artifact - * @param artifactEntries a set of container entries that describe that files that make up an artifact - * @param downloadPath the path where the artifact will be downloaded to - * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to - */ -function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { - // use a set for the directory paths so that there are no duplicates - const directories = new Set(); - const specifications = { - rootDownloadLocation: includeRootDirectory - ? 
path.join(downloadPath, artifactName) - : downloadPath, - directoryStructure: [], - emptyFilesToCreate: [], - filesToDownload: [] - }; - for (const entry of artifactEntries) { - // Ignore artifacts in the container that don't begin with the same name - if (entry.path.startsWith(`${artifactName}/`) || - entry.path.startsWith(`${artifactName}\\`)) { - // normalize all separators to the local OS - const normalizedPathEntry = path.normalize(entry.path); - // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path - const filePath = path.join(downloadPath, includeRootDirectory - ? normalizedPathEntry - : normalizedPathEntry.replace(artifactName, '')); - // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' - // itemType cannot be relied upon. The file must be used to determine the directory structure - if (entry.itemType === 'file') { - // Get the directories that we need to create from the filePath for each individual file - directories.add(path.dirname(filePath)); - if (entry.fileLength === 0) { - // An empty file was uploaded, create the empty files locally so that no extra http calls are made - specifications.emptyFilesToCreate.push(filePath); - } - else { - specifications.filesToDownload.push({ - sourceLocation: entry.contentLocation, - targetPath: filePath - }); - } - } - } - } - specifications.directoryStructure = Array.from(directories); - return specifications; -} -exports.getDownloadSpecification = getDownloadSpecification; -//# sourceMappingURL=download-specification.js.map + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) -/***/ }), + function next () { + if (--n === 0) + self._finish() + } +} -/***/ 432: -/***/ (function(__unusedmodule, exports) { +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() -"use strict"; + var found = Object.keys(matchset) + var self = this + var n = found.length -Object.defineProperty(exports, "__esModule", { value: true }); -/** - * Indicates whether a pattern matches a path - */ -var MatchKind; -(function (MatchKind) { - /** Not matched */ - MatchKind[MatchKind["None"] = 0] = "None"; - /** Matched if the path is a directory */ - MatchKind[MatchKind["Directory"] = 1] = "Directory"; - /** Matched if the path is a regular file */ - MatchKind[MatchKind["File"] = 2] = "File"; - /** Matched */ - MatchKind[MatchKind["All"] = 3] = "All"; -})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); -//# sourceMappingURL=internal-match-kind.js.map + if (n === 0) + return cb() -/***/ }), + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. 
+ p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here -/***/ 459: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} -exports.alphasort = alphasort -exports.alphasorti = alphasorti -exports.setopts = setopts -exports.ownProp = ownProp -exports.makeAbs = makeAbs -exports.finish = finish -exports.mark = mark -exports.isIgnored = isIgnored -exports.childrenIgnored = childrenIgnored +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} -function ownProp (obj, field) { - return Object.prototype.hasOwnProperty.call(obj, field) +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) } -var path = __webpack_require__(622) -var minimatch = __webpack_require__(727) -var isAbsolute = __webpack_require__(409) -var Minimatch = minimatch.Minimatch +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} -function alphasorti (a, b) { - return a.toLowerCase().localeCompare(b.toLowerCase()) +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } } -function alphasort (a, b) { - return a.localeCompare(b) +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } } -function setupIgnores (self, options) { - self.ignore = options.ignore || [] +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') - if (!Array.isArray(self.ignore)) - self.ignore = [self.ignore] + if (this.aborted) + return - if (self.ignore.length) { - self.ignore = self.ignore.map(ignoreMap) + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return } -} -// ignore patterns are always in dot:true mode. -function ignoreMap (pattern) { - var gmatcher = null - if (pattern.slice(-3) === '/**') { - var gpattern = pattern.replace(/(\/\*\*)+$/, '') - gmatcher = new Minimatch(gpattern, { dot: true }) - } + //console.error('PROCESS %d', this._processing, pattern) - return { - matcher: new Minimatch(pattern, { dot: true }), - gmatcher: gmatcher + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ } -} + // now n is the index of the first one that is *not* a string. -function setopts (self, pattern, options) { - if (!options) - options = {} + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return - // base-matching: just use globstar for that. - if (options.matchBase && -1 === pattern.indexOf("/")) { - if (options.noglobstar) { - throw new Error("base matching requires globstar") - } - pattern = "**/" + pattern + case 0: + // pattern *starts* with some non-trivial item. 
+ // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break } - self.silent = !!options.silent - self.pattern = pattern - self.strict = options.strict !== false - self.realpath = !!options.realpath - self.realpathCache = options.realpathCache || Object.create(null) - self.follow = !!options.follow - self.dot = !!options.dot - self.mark = !!options.mark - self.nodir = !!options.nodir - if (self.nodir) - self.mark = true - self.sync = !!options.sync - self.nounique = !!options.nounique - self.nonull = !!options.nonull - self.nosort = !!options.nosort - self.nocase = !!options.nocase - self.stat = !!options.stat - self.noprocess = !!options.noprocess - self.absolute = !!options.absolute + var remain = pattern.slice(n) - self.maxLength = options.maxLength || Infinity - self.cache = options.cache || Object.create(null) - self.statCache = options.statCache || Object.create(null) - self.symlinks = options.symlinks || Object.create(null) + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix - setupIgnores(self, options) + var abs = this._makeAbs(read) - self.changedCwd = false - var cwd = process.cwd() - if (!ownProp(options, "cwd")) - self.cwd = cwd - else { - self.cwd = path.resolve(options.cwd) - self.changedCwd = self.cwd !== cwd - } + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() - self.root = options.root || path.resolve(self.cwd, "/") - self.root = path.resolve(self.root) - if (process.platform === "win32") - self.root = self.root.replace(/\\/g, "/") + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} - // TODO: is an absolute `cwd` supposed to be resolved against `root`? - // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') - self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) - if (process.platform === "win32") - self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") - self.nomount = !!options.nomount +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} - // disable comments and negation in Minimatch. - // Note that they are not supported in Glob itself anyway. - options.nonegate = true - options.nocomment = true +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - self.minimatch = new Minimatch(pattern, options) - self.options = self.minimatch.options -} + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() -function finish (self) { - var nou = self.nounique - var all = nou ? [] : Object.create(null) + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. 
+ var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' - for (var i = 0, l = self.matches.length; i < l; i ++) { - var matches = self.matches[i] - if (!matches || Object.keys(matches).length === 0) { - if (self.nonull) { - // do like the shell, and spit out the literal glob - var literal = self.minimatch.globSet[i] - if (nou) - all.push(literal) - else - all[literal] = true + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) } - } else { - // had matches - var m = Object.keys(matches) - if (nou) - all.push.apply(all, m) - else - m.forEach(function (m) { - all[m] = true - }) + if (m) + matchedEntries.push(e) } } - if (!nou) - all = Object.keys(all) + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) - if (!self.nosort) - all = all.sort(self.nocase ? alphasorti : alphasort) + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() - // at *some* point we statted all of these - if (self.mark) { - for (var i = 0; i < all.length; i++) { - all[i] = self._mark(all[i]) + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) } - if (self.nodir) { - all = all.filter(function (e) { - var notDir = !(/\/$/.test(e)) - var c = self.cache[e] || self.cache[makeAbs(self, e)] - if (notDir && c) - notDir = c !== 'DIR' && !Array.isArray(c) - return notDir - }) + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return } - if (self.ignore.length) - all = all.filter(function(m) { - return !isIgnored(self, m) - }) + var abs = isAbsolute(e) ? 
e : this._makeAbs(e) - self.found = all -} + if (this.mark) + e = this._mark(e) -function mark (self, p) { - var abs = makeAbs(self, p) - var c = self.cache[abs] - var m = p - if (c) { - var isDir = c === 'DIR' || Array.isArray(c) - var slash = p.slice(-1) === '/' + if (this.absolute) + e = abs - if (isDir && !slash) - m += '/' - else if (!isDir && slash) - m = m.slice(0, -1) + if (this.matches[index][e]) + return - if (m !== p) { - var mabs = makeAbs(self, m) - self.statCache[mabs] = self.statCache[abs] - self.cache[mabs] = self.cache[abs] - } + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return } - return m + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) } -// lotta situps... -function makeAbs (self, f) { - var abs = f - if (f.charAt(0) === '/') { - abs = path.join(self.root, f) - } else if (isAbsolute(f) || f === '') { - abs = f - } else if (self.changedCwd) { - abs = path.resolve(self.cwd, f) - } else { - abs = path.resolve(f) - } +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return - if (process.platform === 'win32') - abs = abs.replace(/\\/g, '/') + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) - return abs -} + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + if (lstatcb) + fs.lstat(abs, lstatcb) -// Return true, if pattern ends with globstar '**', for the accompanying parent directory. -// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents -function isIgnored (self, path) { - if (!self.ignore.length) - return false + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() - return self.ignore.some(function(item) { - return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) - }) + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. + if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } } -function childrenIgnored (self, path) { - if (!self.ignore.length) - return false +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return - return self.ignore.some(function(item) { - return !!(item.gmatcher && item.gmatcher.match(path)) - }) -} + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) -/***/ }), + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() -/***/ 479: -/***/ (function(__unusedmodule, exports) { + if (Array.isArray(c)) + return cb(null, c) + } -"use strict"; + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) +} -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } } -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Bearer ${this.token}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true } + } + + this.cache[abs] = entries + return cb(null, entries) } -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - if (!options.headers) { - throw Error('The request has no headers'); - } - options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; - } - // This handler cannot handle 401 - canHandleAuthentication() { - return false; - } - handleAuthentication() { - return __awaiter(this, void 0, void 0, function* () { - throw new Error('not implemented'); - }); - } + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() } -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; -//# sourceMappingURL=auth.js.map -/***/ }), +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} -/***/ 507: -/***/ (function(module) { -"use strict"; +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() - var r = range(a, b, str); + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? m[0] : null; -} + var isSym = this.symlinks[abs] + var len = entries.length -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() - if (ai >= 0 && bi > 0) { - begs = []; - left = str.length; + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) - bi = str.indexOf(b, i + 1); - } + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } - i = ai < bi && ai >= 0 ? ai : bi; - } + cb() +} - if (begs.length) { - result = [ left, right ]; +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' } } - return result; + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() } +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' -/***/ }), + if (f.length > this.maxLength) + return cb() -/***/ 547: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] -"use strict"; + if (Array.isArray(c)) + c = 'DIR' -Object.defineProperty(exports, "__esModule", { value: true }); -exports.checkArtifactFilePath = exports.checkArtifactName = void 0; -const core_1 = __webpack_require__(676); -/** - * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected - * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain - * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an - * individual filesystem/platform will not be supported on all fileSystems/platforms - * - * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone - */ -const invalidArtifactFilePathCharacters = new Map([ - ['"', ' Double quote "'], - [':', ' Colon :'], - ['<', ' Less than <'], - ['>', ' Greater than >'], - ['|', ' Vertical bar |'], - ['*', ' Asterisk *'], - ['?', ' Question mark ?'], - ['\r', ' Carriage return \\r'], - ['\n', ' Line feed \\n'] -]); -const invalidArtifactNameCharacters = new Map([ - ...invalidArtifactFilePathCharacters, - ['\\', ' Backslash \\'], - ['/', ' Forward slash /'] -]); -/** - * Scans the name of the artifact to make sure there are no illegal characters - */ -function checkArtifactName(name) { - if (!name) { - throw new Error(`Artifact name: ${name}, is incorrectly provided`); + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) } - for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) { - if (name.includes(invalidCharacterKey)) { - throw new Error(`Artifact name is not valid: ${name}. 
Contains the following character: ${errorMessageForCharacter} - -Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()} - -These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`); - } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) } - core_1.info(`Artifact name is valid!`); + } } -exports.checkArtifactName = checkArtifactName; -/** - * Scans the name of the filePath used to make sure there are no illegal characters - */ -function checkArtifactFilePath(path) { - if (!path) { - throw new Error(`Artifact path: ${path}, is incorrectly provided`); - } - for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { - if (path.includes(invalidCharacterKey)) { - throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter} - -Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} - -The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems. - `); - } - } + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) } -exports.checkArtifactFilePath = checkArtifactFilePath; -//# sourceMappingURL=path-and-artifact-name-validation.js.map + + +/***/ }), + +/***/ 413: +/***/ (function(module) { + +module.exports = require("stream"); + +/***/ }), + +/***/ 417: +/***/ (function(module) { + +module.exports = require("crypto"); /***/ }), -/***/ 582: +/***/ 425: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -5577,8 +5722,8 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; const http = __importStar(__webpack_require__(605)); const https = __importStar(__webpack_require__(211)); -const pm = __importStar(__webpack_require__(230)); -const tunnel = __importStar(__webpack_require__(257)); +const pm = __importStar(__webpack_require__(177)); +const tunnel = __importStar(__webpack_require__(988)); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; @@ -5901,608 +6046,382 @@ class HttpClient { } else if (!res) { // If `err` is not passed, then `res` must be passed. - reject(new Error('Unknown error')); - } - else { - resolve(res); - } - } - this.requestRawWithCallback(info, data, callbackForResult); - }); - }); - } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - if (typeof data === 'string') { - if (!info.options.headers) { - info.options.headers = {}; - } - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - function handleResult(err, res) { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - } - const req = info.httpModule.request(info.options, (msg) => { - const res = new HttpClientResponse(msg); - handleResult(undefined, res); - }); - let socket; - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error(`Request timeout: ${info.options.path}`)); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - const parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? 
parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - for (const handler of this.handlers) { - handler.prepareRequest(info.options); - } - } - return info; - } - _mergeHeaders(headers) { - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - const proxyUrl = pm.getProxyUrl(parsedUrl); - const useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. - if (proxyUrl && proxyUrl.hostname) { - const agentOptions = { - maxSockets, - keepAlive: this._keepAlive, - proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` - })), { host: proxyUrl.hostname, port: proxyUrl.port }) - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - return __awaiter(this, void 0, void 0, function* () { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - }); - } - _processResponse(res, options) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { - const statusCode = res.message.statusCode || 0; - const response = { - statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode === HttpCodes.NotFound) { - resolve(response); - } - // get the result from the body - function dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - const a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - let obj; - let contents; - try { - contents = yield res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. 
- if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; + reject(new Error('Unknown error')); } else { - msg = `Failed request: (${statusCode})`; + resolve(res); } - const err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); } - })); + this.requestRawWithCallback(info, data, callbackForResult); + }); }); } -} -exports.HttpClient = HttpClient; -const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); -//# sourceMappingURL=index.js.map - -/***/ }), - -/***/ 605: -/***/ (function(module) { - -module.exports = require("http"); - -/***/ }), - -/***/ 609: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = __webpack_require__(357); -const os = __webpack_require__(87); -const path = __webpack_require__(622); -const pathHelper = __webpack_require__(175); -const minimatch_1 = __webpack_require__(727); -const internal_match_kind_1 = __webpack_require__(432); -const internal_path_1 = __webpack_require__(318); -const IS_WINDOWS = process.platform === 'win32'; -class Pattern { - constructor(patternOrNegate, segments) { - /** - * Indicates whether matches should be excluded from the result set - */ - this.negate = false; - // Pattern overload - let pattern; - if (typeof patternOrNegate === 'string') { - pattern = patternOrNegate.trim(); - } - // Segments overload - else { - // Convert to pattern - segments = segments || []; - assert(segments.length, `Parameter 'segments' must not empty`); - const root = Pattern.getLiteral(segments[0]); - assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); - pattern = new internal_path_1.Path(segments).toString().trim(); - if (patternOrNegate) { - pattern = `!${pattern}`; + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + if (typeof data === 'string') { + if (!info.options.headers) { + info.options.headers = {}; } + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } - // Negate - while (pattern.startsWith('!')) { - this.negate = !this.negate; - pattern = pattern.substr(1).trim(); + let callbackCalled = false; + function handleResult(err, res) { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } } - // Normalize slashes and ensures absolute root - pattern = Pattern.fixupPattern(pattern); - // Segments - this.segments = new internal_path_1.Path(pattern).segments; - // Trailing slash indicates the pattern should only match directories, not regular files - this.trailingSeparator = pathHelper - .normalizeSeparators(pattern) - .endsWith(path.sep); - pattern = pathHelper.safeTrimTrailingSeparator(pattern); - // Search path (literal path prior to the first glob segment) - let foundGlob = false; - const searchSegments = this.segments - .map(x => Pattern.getLiteral(x)) - .filter(x => !foundGlob && !(foundGlob = x === '')); - this.searchPath = new internal_path_1.Path(searchSegments).toString(); - // Root RegExp (required when determining partial match) - this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); - // Create minimatch - const minimatchOptions = { - dot: true, - nobrace: true, - nocase: IS_WINDOWS, - nocomment: true, - noext: true, - nonegate: true - }; - pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; - this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); - } - /** - * Matches the pattern against the specified path - */ - match(itemPath) { - // Last segment is globstar? - if (this.segments[this.segments.length - 1] === '**') { - // Normalize slashes - itemPath = pathHelper.normalizeSeparators(itemPath); - // Append a trailing slash. Otherwise Minimatch will not match the directory immediately - // preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns - // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. - if (!itemPath.endsWith(path.sep)) { - // Note, this is safe because the constructor ensures the pattern has an absolute root. - // For example, formats like C: and C:foo on Windows are resolved to an aboslute root. - itemPath = `${itemPath}${path.sep}`; + const req = info.httpModule.request(info.options, (msg) => { + const res = new HttpClientResponse(msg); + handleResult(undefined, res); + }); + let socket; + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); } + handleResult(new Error(`Request timeout: ${info.options.path}`)); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); } - else { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); } - // Match - if (this.minimatch.match(itemPath)) { - return this.trailingSeparator ? 
internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + else { + req.end(); } - return internal_match_kind_1.MatchKind.None; } /** - * Indicates whether the pattern may match descendants of the specified path + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ - partialMatch(itemPath) { - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // matchOne does not handle root path correctly - if (pathHelper.dirname(itemPath) === itemPath) { - return this.rootRegExp.test(itemPath); + getAgent(serverUrl) { + const parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; } - return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + for (const handler of this.handlers) { + handler.prepareRequest(info.options); + } + } + return info; } - /** - * Escapes glob patterns within a path - */ - static globEscape(s) { - return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS - .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment - .replace(/\?/g, '[?]') // escape '?' - .replace(/\*/g, '[*]'); // escape '*' + _mergeHeaders(headers) { + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); + } + return lowercaseKeys(headers || {}); } - /** - * Normalizes slashes and ensures absolute root - */ - static fixupPattern(pattern) { - // Empty - assert(pattern, 'pattern cannot be empty'); - // Must not contain `.` segment, unless first segment - // Must not contain `..` segment - const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); - assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); - // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r - assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); - // Normalize slashes - pattern = pathHelper.normalizeSeparators(pattern); - // Replace leading `.` segment - if (pattern === '.' 
|| pattern.startsWith(`.${path.sep}`)) { - pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; } - // Replace leading `~` segment - else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { - const homedir = os.homedir(); - assert(homedir, 'Unable to determine HOME directory'); - assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); - pattern = Pattern.globEscape(homedir) + pattern.substr(1); + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + const proxyUrl = pm.getProxyUrl(parsedUrl); + const useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; } - // Replace relative drive root, e.g. pattern is C: or C:foo - else if (IS_WINDOWS && - (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); - if (pattern.length > 2 && !root.endsWith('\\')) { - root += '\\'; + // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis. + if (proxyUrl && proxyUrl.hostname) { + const agentOptions = { + maxSockets, + keepAlive: this._keepAlive, + proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` + })), { host: proxyUrl.hostname, port: proxyUrl.port }) + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; } - pattern = Pattern.globEscape(root) + pattern.substr(2); - } - // Replace relative root, e.g. pattern is \ or \foo - else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { - let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); - if (!root.endsWith('\\')) { - root += '\\'; + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; } - pattern = Pattern.globEscape(root) + pattern.substr(1); + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; } - // Otherwise ensure absolute root - else { - pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; } - return pathHelper.normalizeSeparators(pattern); + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; } - /** - * Attempts to unescape a pattern segment to create a literal path segment. - * Otherwise returns empty string. - */ - static getLiteral(segment) { - let literal = ''; - for (let i = 0; i < segment.length; i++) { - const c = segment[i]; - // Escape - if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { - literal += segment[++i]; - continue; - } - // Wildcard - else if (c === '*' || c === '?') { - return ''; - } - // Character set - else if (c === '[' && i + 1 < segment.length) { - let set = ''; - let closed = -1; - for (let i2 = i + 1; i2 < segment.length; i2++) { - const c2 = segment[i2]; - // Escape - if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { - set += segment[++i2]; - continue; - } - // Closed - else if (c2 === ']') { - closed = i2; - break; + _performExponentialBackoff(retryNumber) { + return __awaiter(this, void 0, void 0, function* () { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode || 0; + const response = { + statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode === HttpCodes.NotFound) { + resolve(response); + } + // get the result from the body + function dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + const a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } } - // Otherwise - else { - set += c2; + return value; + } + let obj; + let contents; + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; } + response.headers = res.message.headers; } - // Closed? - if (closed >= 0) { - // Cannot convert - if (set.length > 1) { - return ''; + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; } - // Convert to literal - if (set) { - literal += set; - i = closed; - continue; + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; } + else { + msg = `Failed request: (${statusCode})`; + } + const err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); } - // Otherwise fall thru - } - // Append - literal += c; - } - return literal; - } - /** - * Escapes regexp special characters - * https://javascript.info/regexp-escaping - */ - static regExpEscape(s) { - return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); + else { + resolve(response); + } + })); + }); } } -exports.Pattern = Pattern; -//# sourceMappingURL=internal-pattern.js.map - -/***/ }), - -/***/ 614: -/***/ (function(module) { - -module.exports = require("events"); +exports.HttpClient = HttpClient; +const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); +//# sourceMappingURL=index.js.map /***/ }), -/***/ 615: -/***/ (function(__unusedmodule, exports) { +/***/ 431: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; Object.defineProperty(exports, "__esModule", { value: true }); +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(82); /** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; } - else if (typeof input === 'string' || input instanceof String) { - return input; + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; } - return JSON.stringify(input); } -exports.toCommandValue = toCommandValue; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 622: -/***/ (function(module) { - -module.exports = 
require("path"); +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map /***/ }), -/***/ 628: +/***/ 452: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); -const core = __importStar(__webpack_require__(676)); -const constants_1 = __webpack_require__(858); +exports.HttpManager = void 0; +const utils_1 = __webpack_require__(870); /** - * Helper to get all the inputs for the action + * Used for managing http clients during either upload or download */ -function getInputs() { - const name = core.getInput(constants_1.Inputs.Name); - const path = core.getInput(constants_1.Inputs.Path, { required: true }); - const ifNoFilesFound = core.getInput(constants_1.Inputs.IfNoFilesFound); - const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound]; - if (!noFileBehavior) { - core.setFailed(`Unrecognized ${constants_1.Inputs.IfNoFilesFound} input. Provided: ${ifNoFilesFound}. Available options: ${Object.keys(constants_1.NoFileOptions)}`); +class HttpManager { + constructor(clientCount, userAgent) { + if (clientCount < 1) { + throw new Error('There must be at least one client'); + } + this.userAgent = userAgent; + this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); } - const inputs = { - artifactName: name, - searchPath: path, - ifNoFilesFound: noFileBehavior - }; - const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays); - if (retentionDaysStr) { - inputs.retentionDays = parseInt(retentionDaysStr); - if (isNaN(inputs.retentionDays)) { - core.setFailed('Invalid retention-days'); + getClient(index) { + return this.clients[index]; + } + // client disposal is necessary if a keep-alive connection is used to properly close the connection + // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 + disposeAndReplaceClient(index) { + this.clients[index].dispose(); + this.clients[index] = utils_1.createHttpClient(this.userAgent); + } + disposeAndReplaceAllClients() { + for (const [index] of this.clients.entries()) { + this.disposeAndReplaceClient(index); } } - return inputs; } -exports.getInputs = getInputs; - - -/***/ }), - -/***/ 630: -/***/ (function(module) { - -module.exports = require("perf_hooks"); - -/***/ }), - -/***/ 631: -/***/ (function(module) { - -module.exports = require("net"); - -/***/ }), - -/***/ 669: -/***/ (function(module) { - -module.exports = require("util"); +exports.HttpManager = HttpManager; +//# sourceMappingURL=http-manager.js.map /***/ }), -/***/ 676: +/***/ 470: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -6524,9 +6443,9 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -const command_1 = __webpack_require__(936); -const file_command_1 = __webpack_require__(424); 
-const utils_1 = __webpack_require__(615); +const command_1 = __webpack_require__(431); +const file_command_1 = __webpack_require__(102); +const utils_1 = __webpack_require__(82); const os = __importStar(__webpack_require__(87)); const path = __importStar(__webpack_require__(622)); /** @@ -6747,7 +6666,7 @@ exports.getState = getState; /***/ }), -/***/ 682: +/***/ 489: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -6782,9 +6701,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }; Object.defineProperty(exports, "__esModule", { value: true }); exports.retryHttpClientRequest = exports.retry = void 0; -const utils_1 = __webpack_require__(128); -const core = __importStar(__webpack_require__(676)); -const config_variables_1 = __webpack_require__(750); +const utils_1 = __webpack_require__(870); +const core = __importStar(__webpack_require__(470)); +const config_variables_1 = __webpack_require__(401); function retry(name, operation, customErrorMessages, maxAttempts) { return __awaiter(this, void 0, void 0, function* () { let response = undefined; @@ -6842,1358 +6761,1500 @@ exports.retryHttpClientRequest = retryHttpClientRequest; /***/ }), -/***/ 691: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var wrappy = __webpack_require__(50) -var reqs = Object.create(null) -var once = __webpack_require__(731) +/***/ 532: +/***/ (function(__unusedmodule, exports, __webpack_require__) { -module.exports = wrappy(inflight) +"use strict"; -function inflight (key, cb) { - if (reqs[key]) { - reqs[key].push(cb) - return null - } else { - reqs[key] = [cb] - return makeres(key) - } +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getDownloadSpecification = void 0; +const path = __importStar(__webpack_require__(622)); +/** + * Creates a specification for a set of files that will be downloaded + * @param artifactName the name of the artifact + * @param artifactEntries a set of container entries that describe that files that make up an artifact + * @param downloadPath the path where the artifact will be downloaded to + * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to + */ +function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { + // use a set for the directory paths so that there are no duplicates + const directories = new Set(); + const specifications = { + rootDownloadLocation: includeRootDirectory + ? 
path.join(downloadPath, artifactName) + : downloadPath, + directoryStructure: [], + emptyFilesToCreate: [], + filesToDownload: [] + }; + for (const entry of artifactEntries) { + // Ignore artifacts in the container that don't begin with the same name + if (entry.path.startsWith(`${artifactName}/`) || + entry.path.startsWith(`${artifactName}\\`)) { + // normalize all separators to the local OS + const normalizedPathEntry = path.normalize(entry.path); + // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path + const filePath = path.join(downloadPath, includeRootDirectory + ? normalizedPathEntry + : normalizedPathEntry.replace(artifactName, '')); + // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' + // itemType cannot be relied upon. The file must be used to determine the directory structure + if (entry.itemType === 'file') { + // Get the directories that we need to create from the filePath for each individual file + directories.add(path.dirname(filePath)); + if (entry.fileLength === 0) { + // An empty file was uploaded, create the empty files locally so that no extra http calls are made + specifications.emptyFilesToCreate.push(filePath); + } + else { + specifications.filesToDownload.push({ + sourceLocation: entry.contentLocation, + targetPath: filePath + }); + } + } + } + } + specifications.directoryStructure = Array.from(directories); + return specifications; } +exports.getDownloadSpecification = getDownloadSpecification; +//# sourceMappingURL=download-specification.js.map -function makeres (key) { - return once(function RES () { - var cbs = reqs[key] - var len = cbs.length - var args = slice(arguments) +/***/ }), - // XXX It's somewhat ambiguous whether a new callback added in this - // pass should be queued for later execution if something in the - // list of callbacks throws, or if it should just be discarded. - // However, it's such an edge case that it hardly matters, and either - // choice is likely as surprising as the other. - // As it happens, we do go ahead and schedule it for later execution. - try { - for (var i = 0; i < len; i++) { - cbs[i].apply(null, args) - } - } finally { - if (cbs.length > len) { - // added more in the interim. - // de-zalgo, just in case, but don't call again. - cbs.splice(0, len) - process.nextTick(function () { - RES.apply(null, args) - }) - } else { - delete reqs[key] - } +/***/ 553: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +exports.checkArtifactFilePath = exports.checkArtifactName = void 0; +const core_1 = __webpack_require__(470); +/** + * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected + * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain + * file systems such as NTFS. 
To maintain platform-agnostic behavior, all characters that are not supported by an + * individual filesystem/platform will not be supported on all fileSystems/platforms + * + * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone + */ +const invalidArtifactFilePathCharacters = new Map([ + ['"', ' Double quote "'], + [':', ' Colon :'], + ['<', ' Less than <'], + ['>', ' Greater than >'], + ['|', ' Vertical bar |'], + ['*', ' Asterisk *'], + ['?', ' Question mark ?'], + ['\r', ' Carriage return \\r'], + ['\n', ' Line feed \\n'] +]); +const invalidArtifactNameCharacters = new Map([ + ...invalidArtifactFilePathCharacters, + ['\\', ' Backslash \\'], + ['/', ' Forward slash /'] +]); +/** + * Scans the name of the artifact to make sure there are no illegal characters + */ +function checkArtifactName(name) { + if (!name) { + throw new Error(`Artifact name: ${name}, is incorrectly provided`); } - }) + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) { + if (name.includes(invalidCharacterKey)) { + throw new Error(`Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()} + +These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`); + } + } + core_1.info(`Artifact name is valid!`); } - -function slice (args) { - var length = args.length - var array = [] - - for (var i = 0; i < length; i++) array[i] = args[i] - return array +exports.checkArtifactName = checkArtifactName; +/** + * Scans the name of the filePath used to make sure there are no illegal characters + */ +function checkArtifactFilePath(path) { + if (!path) { + throw new Error(`Artifact path: ${path}, is incorrectly provided`); + } + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { + if (path.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} + +The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems. + `); + } + } } - +exports.checkArtifactFilePath = checkArtifactFilePath; +//# sourceMappingURL=path-and-artifact-name-validation.js.map /***/ }), -/***/ 707: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 554: +/***/ (function(__unusedmodule, exports) { "use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; Object.defineProperty(exports, "__esModule", { value: true }); -exports.create = void 0; -const artifact_client_1 = __webpack_require__(390); -/** - * Constructs an ArtifactClient - */ -function create() { - return artifact_client_1.DefaultArtifactClient.create(); +exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } } -exports.create = create; -//# sourceMappingURL=artifact-client.js.map +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Bearer ${this.token}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + if (!options.headers) { + throw Error('The request has no headers'); + } + options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; + } + // This handler cannot handle 401 + canHandleAuthentication() { + return false; + } + handleAuthentication() { + return __awaiter(this, void 0, void 0, function* () { + throw new Error('not implemented'); + }); + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; +//# sourceMappingURL=auth.js.map /***/ }), -/***/ 719: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 575: +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; - -const { promisify } = __webpack_require__(669); -const tmp = __webpack_require__(26); - -// file -module.exports.fileSync = tmp.fileSync; -const fileWithOptions = promisify((options, cb) => - tmp.file(options, (err, path, fd, cleanup) => - err ? cb(err) : cb(undefined, { path, fd, cleanup: promisify(cleanup) }) - ) -); -module.exports.file = async (options) => fileWithOptions(options); - -module.exports.withFile = async function withFile(fn, options) { - const { path, fd, cleanup } = await module.exports.file(options); - try { - return await fn({ path, fd }); - } finally { - await cleanup(); - } +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - - -// directory -module.exports.dirSync = tmp.dirSync; -const dirWithOptions = promisify((options, cb) => - tmp.dir(options, (err, path, cleanup) => - err ? cb(err) : cb(undefined, { path, cleanup: promisify(cleanup) }) - ) -); -module.exports.dir = async (options) => dirWithOptions(options); - -module.exports.withDir = async function withDir(fn, options) { - const { path, cleanup } = await module.exports.dir(options); - try { - return await fn({ path }); - } finally { - await cleanup(); - } +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; }; - - -// name generation -module.exports.tmpNameSync = tmp.tmpNameSync; -module.exports.tmpName = promisify(tmp.tmpName); - -module.exports.tmpdir = tmp.tmpdir; - -module.exports.setGracefulCleanup = tmp.setGracefulCleanup; +Object.defineProperty(exports, "__esModule", { value: true }); +const glob = __importStar(__webpack_require__(281)); +const path = __importStar(__webpack_require__(622)); +const core_1 = __webpack_require__(470); +const fs_1 = __webpack_require__(747); +const path_1 = __webpack_require__(622); +const util_1 = __webpack_require__(669); +const stats = util_1.promisify(fs_1.stat); +function getDefaultGlobOptions() { + return { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; +} +/** + * If multiple paths are specific, the least common ancestor (LCA) of the search paths is used as + * the delimiter to control the directory structure for the artifact. 
This function returns the LCA + * when given an array of search paths + * + * Example 1: The patterns `/foo/` and `/bar/` returns `/` + * + * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo` + */ +function getMultiPathLCA(searchPaths) { + if (searchPaths.length < 2) { + throw new Error('At least two search paths must be provided'); + } + const commonPaths = new Array(); + const splitPaths = new Array(); + let smallestPathLength = Number.MAX_SAFE_INTEGER; + // split each of the search paths using the platform specific separator + for (const searchPath of searchPaths) { + core_1.debug(`Using search path ${searchPath}`); + const splitSearchPath = path.normalize(searchPath).split(path.sep); + // keep track of the smallest path length so that we don't accidentally later go out of bounds + smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length); + splitPaths.push(splitSearchPath); + } + // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it + if (searchPaths[0].startsWith(path.sep)) { + commonPaths.push(path.sep); + } + let splitIndex = 0; + // function to check if the paths are the same at a specific index + function isPathTheSame() { + const compare = splitPaths[0][splitIndex]; + for (let i = 1; i < splitPaths.length; i++) { + if (compare !== splitPaths[i][splitIndex]) { + // a non-common index has been reached + return false; + } + } + return true; + } + // loop over all the search paths until there is a non-common ancestor or we go out of bounds + while (splitIndex < smallestPathLength) { + if (!isPathTheSame()) { + break; + } + // if all are the same, add to the end result & increment the index + commonPaths.push(splitPaths[0][splitIndex]); + splitIndex++; + } + return path.join(...commonPaths); +} +function findFilesToUpload(searchPath, globOptions) { + return __awaiter(this, void 0, void 0, function* () { + const searchResults = []; + const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions()); + const rawSearchResults = yield globber.glob(); + /* + Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten + Detect any files that could be overwritten for user awareness + */ + const set = new Set(); + /* + Directories will be rejected if attempted to be uploaded. This includes just empty + directories so filter any directories out from the raw search results + */ + for (const searchResult of rawSearchResults) { + const fileStats = yield stats(searchResult); + // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead + if (!fileStats.isDirectory()) { + core_1.debug(`File:${searchResult} was found using the provided searchPath`); + searchResults.push(searchResult); + // detect any files that would be overwritten because of case insensitivity + if (set.has(searchResult.toLowerCase())) { + core_1.info(`Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`); + } + else { + set.add(searchResult.toLowerCase()); + } + } + else { + core_1.debug(`Removing ${searchResult} from rawSearchResults because it is a directory`); + } + } + // Calculate the root directory for the artifact using the search paths that were utilized + const searchPaths = globber.getSearchPaths(); + if (searchPaths.length > 1) { + core_1.info(`Multiple search paths detected. 
Calculating the least common ancestor of all paths`); + const lcaSearchPath = getMultiPathLCA(searchPaths); + core_1.info(`The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact`); + return { + filesToUpload: searchResults, + rootDirectory: lcaSearchPath + }; + } + /* + Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is + not preserved and the root directory will be the single files parent directory + */ + if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) { + return { + filesToUpload: searchResults, + rootDirectory: path_1.dirname(searchResults[0]) + }; + } + return { + filesToUpload: searchResults, + rootDirectory: searchPaths[0] + }; + }); +} +exports.findFilesToUpload = findFilesToUpload; /***/ }), -/***/ 722: +/***/ 583: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; Object.defineProperty(exports, "__esModule", { value: true }); -exports.StatusReporter = void 0; -const core_1 = __webpack_require__(676); +const core = __importStar(__webpack_require__(470)); +const constants_1 = __webpack_require__(694); /** - * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded - * - * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable - * The total status of the upload/download gets displayed according to this value - * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function + * Helper to get all the inputs for the action */ -class StatusReporter { - constructor(displayFrequencyInMilliseconds) { - this.totalNumberOfFilesToProcess = 0; - this.processedCount = 0; - this.largeFiles = new Map(); - this.totalFileStatus = undefined; - this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; - } - setTotalNumberOfFilesToProcess(fileTotal) { - this.totalNumberOfFilesToProcess = fileTotal; - this.processedCount = 0; - } - start() { - // displays information about the total upload/download status - this.totalFileStatus = setInterval(() => { - // display 1 decimal place without any rounding - const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); - core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); - }, this.displayFrequencyInMilliseconds); - } - // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload - updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) { - // display 1 decimal place without any rounding - const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize); - core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); +function getInputs() { + const name = core.getInput(constants_1.Inputs.Name); + const path = core.getInput(constants_1.Inputs.Path, { required: true }); + const ifNoFilesFound = 
core.getInput(constants_1.Inputs.IfNoFilesFound); + const noFileBehavior = constants_1.NoFileOptions[ifNoFilesFound]; + if (!noFileBehavior) { + core.setFailed(`Unrecognized ${constants_1.Inputs.IfNoFilesFound} input. Provided: ${ifNoFilesFound}. Available options: ${Object.keys(constants_1.NoFileOptions)}`); } - stop() { - if (this.totalFileStatus) { - clearInterval(this.totalFileStatus); + const inputs = { + artifactName: name, + searchPath: path, + ifNoFilesFound: noFileBehavior + }; + const retentionDaysStr = core.getInput(constants_1.Inputs.RetentionDays); + if (retentionDaysStr) { + inputs.retentionDays = parseInt(retentionDaysStr); + if (isNaN(inputs.retentionDays)) { + core.setFailed('Invalid retention-days'); } } - incrementProcessedCount() { - this.processedCount++; - } - formatPercentage(numerator, denominator) { - // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed - return ((numerator / denominator) * 100).toFixed(4).toString(); - } -} -exports.StatusReporter = StatusReporter; -//# sourceMappingURL=status-reporter.js.map - -/***/ }), - -/***/ 727: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = minimatch -minimatch.Minimatch = Minimatch - -var path = { sep: '/' } -try { - path = __webpack_require__(622) -} catch (er) {} - -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __webpack_require__(171) - -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } -} - -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' - -// * => any number of characters -var star = qmark + '*?' - -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' - -// characters that need to be escaped in RegExp. -var reSpecials = charSet('().*{}+?[]^$\\!') - -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) -} - -// normalizes slashes. 
-var slashSplit = /\/+/ - -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) - } -} - -function ext (a, b) { - a = a || {} - b = b || {} - var t = {} - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - return t -} - -minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return minimatch - - var orig = minimatch - - var m = function minimatch (p, pattern, options) { - return orig.minimatch(p, pattern, ext(def, options)) - } - - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) - } - - return m -} - -Minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return Minimatch - return minimatch.defaults(def).Minimatch -} - -function minimatch (p, pattern, options) { - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } - - if (!options) options = {} - - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } - - // "" only matches "" - if (pattern.trim() === '') return p === '' - - return new Minimatch(pattern, options).match(p) -} - -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) - } - - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } - - if (!options) options = {} - pattern = pattern.trim() - - // windows support: need to use /, not \ - if (path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } - - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - - // make the set of regexps etc. - this.make() + return inputs; } +exports.getInputs = getInputs; -Minimatch.prototype.debug = function () {} - -Minimatch.prototype.make = make -function make () { - // don't do it more than once. - if (this._made) return - - var pattern = this.pattern - var options = this.options - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } +/***/ }), - // step 1: figure out negation, etc. - this.parseNegate() +/***/ 590: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - // step 2: expand braces - var set = this.globSet = this.braceExpand() +"use strict"; - if (options.debug) this.debug = console.error +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getUploadSpecification = void 0; +const fs = __importStar(__webpack_require__(747)); +const core_1 = __webpack_require__(470); +const path_1 = __webpack_require__(622); +const path_and_artifact_name_validation_1 = __webpack_require__(553); +/** + * Creates a specification that describes how each file that is part of the artifact will be uploaded + * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server + * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file + * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + */ +function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { + // artifact name was checked earlier on, no need to check again + const specifications = []; + if (!fs.existsSync(rootDirectory)) { + throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); + } + if (!fs.lstatSync(rootDirectory).isDirectory()) { + throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); + } + // Normalize and resolve, this allows for either absolute or relative paths to be used + rootDirectory = path_1.normalize(rootDirectory); + rootDirectory = path_1.resolve(rootDirectory); + /* + Example to demonstrate behavior + + Input: + artifactName: my-artifact + rootDirectory: '/home/user/files/plz-upload' + artifactFiles: [ + '/home/user/files/plz-upload/file1.txt', + '/home/user/files/plz-upload/file2.txt', + '/home/user/files/plz-upload/dir/file3.txt' + ] + + Output: + specifications: [ + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] + ] + */ + for (let file of artifactFiles) { + if (!fs.existsSync(file)) { + throw new Error(`File ${file} does not exist`); + } + if (!fs.lstatSync(file).isDirectory()) { + // Normalize and resolve, this allows for either absolute or relative paths to be used + file = path_1.normalize(file); + file = path_1.resolve(file); + if (!file.startsWith(rootDirectory)) { + throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); + } + // Check for forbidden characters in file paths that will be rejected during upload + const uploadPath = file.replace(rootDirectory, ''); + path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath); + /* + uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all + be saved in the same container. 
The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts + + path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt + join('artifact-name/', 'file-to-upload.txt') + join('artifact-name/', '/file-to-upload.txt') + join('artifact-name', 'file-to-upload.txt') + join('artifact-name', '/file-to-upload.txt') + */ + specifications.push({ + absoluteFilePath: file, + uploadFilePath: path_1.join(artifactName, uploadPath) + }); + } + else { + // Directories are rejected by the server during upload + core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); + } + } + return specifications; +} +exports.getUploadSpecification = getUploadSpecification; +//# sourceMappingURL=upload-specification.js.map - this.debug(this.pattern, set) +/***/ }), - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) +/***/ 597: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - this.debug(this.pattern, set) +"use strict"; - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) +Object.defineProperty(exports, "__esModule", { value: true }); +const pathHelper = __webpack_require__(972); +const internal_match_kind_1 = __webpack_require__(327); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? 
pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map - this.debug(this.pattern, set) +/***/ }), - // filter out everything that didn't compile properly. - set = set.filter(function (s) { - return s.indexOf(false) === -1 - }) +/***/ 601: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - this.debug(this.pattern, set) +"use strict"; - this.set = set +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __webpack_require__(470); +/** + * Returns a copy with defaults filled in. + */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; } +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 +/***/ }), - if (options.nonegate) return +/***/ 605: +/***/ (function(module) { - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' - ; i++) { - negate = !negate - negateOffset++ - } +module.exports = require("http"); - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate -} +/***/ }), -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. 
-// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return braceExpand(pattern, options) -} +/***/ 608: +/***/ (function(__unusedmodule, exports, __webpack_require__) { -Minimatch.prototype.braceExpand = braceExpand +"use strict"; -function braceExpand (pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options - } else { - options = {} +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UploadHttpClient = void 0; +const fs = __importStar(__webpack_require__(747)); +const core = __importStar(__webpack_require__(470)); +const tmp = __importStar(__webpack_require__(875)); +const stream = __importStar(__webpack_require__(413)); +const utils_1 = __webpack_require__(870); +const config_variables_1 = __webpack_require__(401); +const util_1 = __webpack_require__(669); +const url_1 = __webpack_require__(835); +const perf_hooks_1 = __webpack_require__(630); +const status_reporter_1 = __webpack_require__(176); +const http_client_1 = __webpack_require__(425); +const http_manager_1 = __webpack_require__(452); +const upload_gzip_1 = __webpack_require__(647); +const requestUtils_1 = __webpack_require__(489); +const stat = util_1.promisify(fs.stat); +class UploadHttpClient { + constructor() { + this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); + this.statusReporter = new status_reporter_1.StatusReporter(10000); + } + /** + * Creates a file container for the new artifact in the remote blob storage/file service + * @param {string} artifactName Name of the artifact being created + * @returns The response from the Artifact Service if the file container was successfully created + */ + createArtifactInFileContainer(artifactName, options) { + return __awaiter(this, void 0, void 0, function* () { + const parameters = { + Type: 'actions_storage', + Name: artifactName + }; + // calculate retention period + if (options && options.retentionDays) { + const maxRetentionStr 
= config_variables_1.getRetentionDays(); + parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr); + } + const data = JSON.stringify(parameters, null, 2); + const artifactUrl = utils_1.getArtifactUrl(); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.uploadHttpManager.getClient(0); + const headers = utils_1.getUploadHeaders('application/json', false); + // Extra information to display when a particular HTTP code is returned + // If a 403 is returned when trying to create a file container, the customer has exceeded + // their storage quota so no new artifact containers can be created + const customErrorMessages = new Map([ + [ + http_client_1.HttpCodes.Forbidden, + 'Artifact storage quota has been hit. Unable to upload any new artifacts' + ], + [ + http_client_1.HttpCodes.BadRequest, + `The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}` + ] + ]); + const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages); + const body = yield response.readBody(); + return JSON.parse(body); + }); + } + /** + * Concurrently upload all of the files in chunks + * @param {string} uploadUrl Base Url for the artifact that was created + * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded + * @returns The size of all the files uploaded in bytes + */ + uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { + return __awaiter(this, void 0, void 0, function* () { + const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); + const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); + core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + const parameters = []; + // by default, file uploads will continue if there is an error unless specified differently in the options + let continueOnError = true; + if (options) { + if (options.continueOnError === false) { + continueOnError = false; + } + } + // prepare the necessary parameters to upload all the files + for (const file of filesToUpload) { + const resourceUrl = new url_1.URL(uploadUrl); + resourceUrl.searchParams.append('itemPath', file.uploadFilePath); + parameters.push({ + file: file.absoluteFilePath, + resourceUrl: resourceUrl.toString(), + maxChunkSize: MAX_CHUNK_SIZE, + continueOnError + }); + } + const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; + const failedItemsToReport = []; + let currentFile = 0; + let completedFiles = 0; + let uploadFileSize = 0; + let totalFileSize = 0; + let abortPendingFileUploads = false; + this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length); + this.statusReporter.start(); + // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors + yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () { + while (currentFile < filesToUpload.length) { + const currentFileParameters = parameters[currentFile]; + currentFile += 1; + if (abortPendingFileUploads) { + failedItemsToReport.push(currentFileParameters.file); + continue; + } + const startTime = perf_hooks_1.performance.now(); + const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); + if (core.isDebug()) { + core.debug(`File: 
${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + } + uploadFileSize += uploadFileResult.successfulUploadSize; + totalFileSize += uploadFileResult.totalSize; + if (uploadFileResult.isSuccess === false) { + failedItemsToReport.push(currentFileParameters.file); + if (!continueOnError) { + // fail fast + core.error(`aborting artifact upload`); + abortPendingFileUploads = true; + } + } + this.statusReporter.incrementProcessedCount(); + } + }))); + this.statusReporter.stop(); + // done uploading, safety dispose all connections + this.uploadHttpManager.disposeAndReplaceAllClients(); + core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + return { + uploadSize: uploadFileSize, + totalSize: totalFileSize, + failedItems: failedItemsToReport + }; + }); + } + /** + * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space. + * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls + * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls + * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded + * @returns The size of the file that was uploaded in bytes along with any failed uploads + */ + uploadFileAsync(httpClientIndex, parameters) { + return __awaiter(this, void 0, void 0, function* () { + const fileStat = yield stat(parameters.file); + const totalFileSize = fileStat.size; + const isFIFO = fileStat.isFIFO(); + let offset = 0; + let isUploadSuccessful = true; + let failedChunkSizes = 0; + let uploadFileSize = 0; + let isGzip = true; + // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O + // for creating a new GZip file, an in-memory buffer is used for compression + // with named pipes the file size is reported as zero in that case don't read the file in memory + if (!isFIFO && totalFileSize < 65536) { + core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); + const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); + // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, + // it will not properly get reset to the start of the stream if a chunk upload needs to be retried + let openUploadStream; + if (totalFileSize < buffer.byteLength) { + // compression did not help with reducing the size, use a readable stream from the original file for upload + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + openUploadStream = () => fs.createReadStream(parameters.file); + isGzip = false; + uploadFileSize = totalFileSize; + } + else { + // create a readable stream using a PassThrough stream that is both readable and writable + core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. 
The file will be uploaded using gzip.`); + openUploadStream = () => { + const passThrough = new stream.PassThrough(); + passThrough.end(buffer); + return passThrough; + }; + uploadFileSize = buffer.byteLength; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // chunk failed to upload + isUploadSuccessful = false; + failedChunkSizes += uploadFileSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + } + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } + else { + // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the + // npm tmp-promise package and this file gets used to create a GZipped file + const tempFile = yield tmp.file(); + core.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); + // create a GZip file of the original file being uploaded, the original file should not be modified in any way + uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); + let uploadFilePath = tempFile.path; + // compression did not help with size reduction, use the original file for upload and delete the temp GZip file + // for named pipes totalFileSize is zero, this assumes compression did help + if (!isFIFO && totalFileSize < uploadFileSize) { + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); + uploadFileSize = totalFileSize; + uploadFilePath = parameters.file; + isGzip = false; + } + else { + core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + } + let abortFileUpload = false; + // upload only a single chunk at a time + while (offset < uploadFileSize) { + const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); + const startChunkIndex = offset; + const endChunkIndex = offset + chunkSize - 1; + offset += parameters.maxChunkSize; + if (abortFileUpload) { + // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed + failedChunkSizes += chunkSize; + continue; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { + start: startChunkIndex, + end: endChunkIndex, + autoClose: false + }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was + // successfully uploaded so the server may report a different size for what was uploaded + isUploadSuccessful = false; + failedChunkSizes += chunkSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + abortFileUpload = true; + } + else { + // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status + if (uploadFileSize > 8388608) { + this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize); + } + } + } + // Delete the temporary file that was created as part of the upload. 
If the temp file does not get manually deleted by + // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about + core.debug(`deleting temporary gzip file ${tempFile.path}`); + yield tempFile.cleanup(); + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } + }); } - } - - pattern = typeof pattern === 'undefined' - ? this.pattern : pattern - - if (typeof pattern === 'undefined') { - throw new TypeError('undefined pattern') - } - - if (options.nobrace || - !pattern.match(/\{.*\}/)) { - // shortcut. no need to expand. - return [pattern] - } - - return expand(pattern) -} - -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - if (pattern.length > 1024 * 64) { - throw new TypeError('pattern is too long') - } - - var options = this.options - - // shortcuts - if (!options.noglobstar && pattern === '**') return GLOBSTAR - if (pattern === '') return '' - - var re = '' - var hasMagic = !!options.nocase - var escaping = false - // ? => one single character - var patternListStack = [] - var negativeLists = [] - var stateChar - var inClass = false - var reClassStart = -1 - var classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - var patternStart = pattern.charAt(0) === '.' ? '' // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' - : '(?!\\.)' - var self = this - - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - self.debug('clearStateChar %j %j', stateChar, re) - stateChar = false + /** + * Uploads a chunk of an individual file to the specified resourceUrl. 
If the upload fails and the status code + * indicates a retryable status, we try to upload the chunk as well + * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls + * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to + * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded + * @param {number} start Starting byte index of file that the chunk belongs to + * @param {number} end Ending byte index of file that the chunk belongs to + * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded + * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream + * @param {number} totalFileSize Original total size of the file that is being uploaded + * @returns if the chunk was successfully uploaded + */ + uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { + return __awaiter(this, void 0, void 0, function* () { + // open a new stream and read it to compute the digest + const digest = yield utils_1.digestForStream(openStream()); + // prepare all the necessary headers before making any http call + const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize), digest); + const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { + const client = this.uploadHttpManager.getClient(httpClientIndex); + return yield client.sendStream('PUT', resourceUrl, openStream(), headers); + }); + let retryCount = 0; + const retryLimit = config_variables_1.getRetryLimit(); + // Increments the current retry count and then checks if the retry limit has been reached + // If there have been too many retries, fail so the download stops + const incrementAndCheckRetryLimit = (response) => { + retryCount++; + if (retryCount > retryLimit) { + if (response) { + utils_1.displayHttpDiagnostics(response); + } + core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + return true; + } + return false; + }; + const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { + this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); + if (retryAfterValue) { + core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + yield utils_1.sleep(retryAfterValue); + } + else { + const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + yield utils_1.sleep(backoffTime); + } + core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + return; + }); + // allow for failed chunks to be retried multiple times + while (retryCount <= retryLimit) { + let response; + try { + response = yield uploadChunkRequest(); + } + catch (error) { + // if an error is caught, it is usually indicative of a timeout so retry the upload + core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + // eslint-disable-next-line no-console + console.log(error); + if (incrementAndCheckRetryLimit()) { + return false; + } + yield backOff(); + continue; + } + // Always read the body of the response. 
There is potential for a resource leak if the body is not read which will + // result in the connection remaining open along with unintended consequences when trying to dispose of the client + yield response.readBody(); + if (utils_1.isSuccessStatusCode(response.message.statusCode)) { + return true; + } + else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { + core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + if (incrementAndCheckRetryLimit(response)) { + return false; + } + utils_1.isThrottledStatusCode(response.message.statusCode) + ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + : yield backOff(); + } + else { + core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + utils_1.displayHttpDiagnostics(response); + return false; + } + } + return false; + }); } - } - - for (var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) - - // skip over any that are escaped. - if (escaping && reSpecials[c]) { - re += '\\' + c - escaping = false - continue + /** + * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. + * Updating the size indicates that we are done uploading all the contents of the artifact + */ + patchArtifactSize(size, artifactName) { + return __awaiter(this, void 0, void 0, function* () { + const resourceUrl = new url_1.URL(utils_1.getArtifactUrl()); + resourceUrl.searchParams.append('artifactName', artifactName); + const parameters = { Size: size }; + const data = JSON.stringify(parameters, null, 2); + core.debug(`URL is ${resourceUrl.toString()}`); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.uploadHttpManager.getClient(0); + const headers = utils_1.getUploadHeaders('application/json', false); + // Extra information to display when a particular HTTP code is returned + const customErrorMessages = new Map([ + [ + http_client_1.HttpCodes.NotFound, + `An Artifact with the name ${artifactName} was not found` + ] + ]); + // TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this + const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); + yield response.readBody(); + core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); + }); } +} +exports.UploadHttpClient = UploadHttpClient; +//# sourceMappingURL=upload-http-client.js.map - switch (c) { - case '/': - // completely not allowed, even escaped. - // Should already be path-split by now. - return false - - case '\\': - clearStateChar() - escaping = true - continue - - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) - - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } - - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. 
- // Handle the stateChar, then proceed with this one. - self.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue - - case '(': - if (inClass) { - re += '(' - continue - } - - if (!stateChar) { - re += '\\(' - continue - } - - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }) - // negation is (?:(?!js)[^/]*) - re += stateChar === '!' ? '(?:(?!(?:' : '(?:' - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue - - case ')': - if (inClass || !patternListStack.length) { - re += '\\)' - continue - } - - clearStateChar() - hasMagic = true - var pl = patternListStack.pop() - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(pl) - } - pl.reEnd = re.length - continue - - case '|': - if (inClass || !patternListStack.length || escaping) { - re += '\\|' - escaping = false - continue - } - - clearStateChar() - re += '|' - continue - - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() - - if (inClass) { - re += '\\' + c - continue - } - - inClass = true - classStart = i - reClassStart = re.length - re += c - continue - - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - escaping = false - continue - } - - // handle the case where we left a class open. - // "[z-a]" is valid, equivalent to "\[z-a\]" - if (inClass) { - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - var cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + cs + ']') - } catch (er) { - // not a valid class! - var sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' - hasMagic = hasMagic || sp[1] - inClass = false - continue - } - } - - // finish up the class. - hasMagic = true - inClass = false - re += c - continue - - default: - // swallow any state char that wasn't consumed - clearStateChar() - - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === '^' && inClass)) { - re += '\\' - } +/***/ }), - re += c +/***/ 614: +/***/ (function(module) { - } // switch - } // for +module.exports = require("events"); - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.substr(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } +/***/ }), - // handle the case where we had a +( thing at the *end* - // of the pattern. 
- // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } +/***/ 621: +/***/ (function(module) { - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) +"use strict"; - this.debug('tail=%j\n %s', tail, tail, pl, re) - var t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } + var r = range(a, b, str); - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - var addPatternStart = false - switch (re.charAt(0)) { - case '.': - case '[': - case '(': addPatternStart = true - } +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n] +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; - var nlBefore = re.slice(0, nl.reStart) - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) - var nlAfter = re.slice(nl.reEnd) + if (ai >= 0 && bi > 0) { + begs = []; + left = str.length; - nlLast += nlAfter + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. 
- var openParensBefore = nlBefore.split('(').length - 1 - var cleanAfter = nlAfter - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter + bi = str.indexOf(b, i + 1); + } - var dollar = '' - if (nlAfter === '' && isSub !== SUBPARSE) { - dollar = '$' + i = ai < bi && ai >= 0 ? ai : bi; } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast - re = newRe - } - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re + if (begs.length) { + result = [ left, right ]; + } } - if (addPatternStart) { - re = patternStart + re - } + return result; +} - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } +/***/ }), - var flags = options.nocase ? 'i' : '' - try { - var regExp = new RegExp('^' + re + '$', flags) - } catch (er) { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') - } +/***/ 622: +/***/ (function(module) { - regExp._glob = pattern - regExp._src = re +module.exports = require("path"); - return regExp -} +/***/ }), -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() -} +/***/ 630: +/***/ (function(module) { -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp +module.exports = require("perf_hooks"); - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set +/***/ }), - if (!set.length) { - this.regexp = false - return this.regexp - } - var options = this.options +/***/ 631: +/***/ (function(module) { - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - var flags = options.nocase ? 'i' : '' +module.exports = require("net"); - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === 'string') ? regExpEscape(p) - : p._src - }).join('\\\/') - }).join('|') +/***/ }), - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' +/***/ 647: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' +"use strict"; - try { - this.regexp = new RegExp(re, flags) - } catch (ex) { - this.regexp = false - } - return this.regexp +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0; +const fs = __importStar(__webpack_require__(747)); +const zlib = __importStar(__webpack_require__(761)); +const util_1 = __webpack_require__(669); +const stat = util_1.promisify(fs.stat); +/** + * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip + * files then will just waste a lot of time before ultimately being discarded (especially for very large files). + * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is + */ +const gzipExemptFileExtensions = [ + '.gzip', + '.zip', + '.tar.lz', + '.tar.gz', + '.tar.bz2', + '.7z' +]; +/** + * Creates a Gzip compressed file of an original file at the provided temporary filepath location + * @param {string} originalFilePath filepath of whatever will be compressed. 
The original file will be unmodified + * @param {string} tempFilePath the location of where the Gzip file will be created + * @returns the size of gzip file that gets created + */ +function createGZipFileOnDisk(originalFilePath, tempFilePath) { + return __awaiter(this, void 0, void 0, function* () { + for (const gzipExemptExtension of gzipExemptFileExtensions) { + if (originalFilePath.endsWith(gzipExemptExtension)) { + // return a really large number so that the original file gets uploaded + return Number.MAX_SAFE_INTEGER; + } + } + return new Promise((resolve, reject) => { + const inputStream = fs.createReadStream(originalFilePath); + const gzip = zlib.createGzip(); + const outputStream = fs.createWriteStream(tempFilePath); + inputStream.pipe(gzip).pipe(outputStream); + outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () { + // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload + const size = (yield stat(tempFilePath)).size; + resolve(size); + })); + outputStream.on('error', error => { + // eslint-disable-next-line no-console + console.log(error); + reject; + }); + }); + }); } - -minimatch.match = function (list, pattern, options) { - options = options || {} - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list +exports.createGZipFileOnDisk = createGZipFileOnDisk; +/** + * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O + * @param originalFilePath the path to the original file that is being GZipped + * @returns a buffer with the GZip file + */ +function createGZipFileInBuffer(originalFilePath) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + var e_1, _a; + const inputStream = fs.createReadStream(originalFilePath); + const gzip = zlib.createGzip(); + inputStream.pipe(gzip); + // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 + const chunks = []; + try { + for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { + const chunk = gzip_1_1.value; + chunks.push(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); + } + finally { if (e_1) throw e_1.error; } + } + resolve(Buffer.concat(chunks)); + })); + }); } +exports.createGZipFileInBuffer = createGZipFileInBuffer; +//# sourceMappingURL=upload-gzip.js.map -Minimatch.prototype.match = match -function match (f, partial) { - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true +/***/ }), - var options = this.options +/***/ 669: +/***/ (function(module) { - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } +module.exports = require("util"); - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) +/***/ }), - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. 
- // Either way, return on the first hit. +/***/ 674: +/***/ (function(module, __unusedexports, __webpack_require__) { - var set = this.set - this.debug(this.pattern, 'set', set) +var wrappy = __webpack_require__(11) +var reqs = Object.create(null) +var once = __webpack_require__(49) - // Find the basename of the path by looking for the last non-empty segment - var filename - var i - for (i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } +module.exports = wrappy(inflight) - for (i = 0; i < set.length; i++) { - var pattern = set[i] - var file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - var hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) } - - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate } -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. -Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options - - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) - - this.debug('matchOne', file.length, pattern.length) - - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] - - this.debug(pattern, p, f) - - // should be impossible. - // some invalid regexp stuff in the set. - if (p === false) return false - - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) - - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' || - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } - - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] - - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) - - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' 
|| - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } - - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) } - return false - } - - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - if (options.nocase) { - hit = f.toLowerCase() === p.toLowerCase() + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. + cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) } else { - hit = f === p - } - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } - - if (!hit) return false - } - - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') - return emptyFileEnd - } - - // should be unreachable. 
- throw new Error('wtf?') + delete reqs[key] + } + } + }) } -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, '$1') -} +function slice (args) { + var length = args.length + var array = [] -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') + for (var i = 0; i < length; i++) array[i] = args[i] + return array } /***/ }), -/***/ 731: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var wrappy = __webpack_require__(50) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +/***/ 681: +/***/ (function(module) { -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) +"use strict"; - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f +function posix(path) { + return path.charAt(0) === '/'; } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); } +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; + /***/ }), -/***/ 747: -/***/ (function(module) { +/***/ 689: +/***/ (function(module, __unusedexports, __webpack_require__) { + +try { + var util = __webpack_require__(669); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __webpack_require__(315); +} -module.exports = require("fs"); /***/ }), -/***/ 750: +/***/ 694: /***/ (function(__unusedmodule, exports) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0; -// The number of concurrent uploads that happens at the same time -function getUploadFileConcurrency() { - return 2; -} -exports.getUploadFileConcurrency = getUploadFileConcurrency; -// When uploading large files that can't be uploaded with a single http call, this controls -// the chunk size that is used during upload -function getUploadChunkSize() { - return 8 * 1024 * 1024; // 8 MB Chunks -} -exports.getUploadChunkSize = getUploadChunkSize; -// The maximum number of retries that can be attempted before an upload or download fails -function getRetryLimit() { - return 5; -} -exports.getRetryLimit = getRetryLimit; -// With exponential backoff, the larger the retry count, the larger the wait time before another attempt -// The retry multiplier controls by how much the backOff time increases depending on the number of retries -function getRetryMultiplier() { - return 1.5; -} -exports.getRetryMultiplier = getRetryMultiplier; -// The initial wait time if an upload or download fails and a retry is being attempted for the first time -function getInitialRetryIntervalInMilliseconds() { - return 3000; -} -exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds; -// The number of concurrent downloads that happens at the same time -function getDownloadFileConcurrency() { - return 2; -} -exports.getDownloadFileConcurrency = getDownloadFileConcurrency; -function getRuntimeToken() { - const token = process.env['ACTIONS_RUNTIME_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); - } - return token; -} -exports.getRuntimeToken = getRuntimeToken; -function getRuntimeUrl() { - const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); - } - return runtimeUrl; -} -exports.getRuntimeUrl = getRuntimeUrl; -function getWorkFlowRunId() { - const workFlowRunId = process.env['GITHUB_RUN_ID']; - if (!workFlowRunId) { - throw new Error('Unable to get GITHUB_RUN_ID env variable'); - } - return workFlowRunId; -} -exports.getWorkFlowRunId = getWorkFlowRunId; -function getWorkSpaceDirectory() { - const workspaceDirectory = process.env['GITHUB_WORKSPACE']; - if (!workspaceDirectory) { - throw new Error('Unable to get GITHUB_WORKSPACE env variable'); - } - return workspaceDirectory; -} -exports.getWorkSpaceDirectory = getWorkSpaceDirectory; -function getRetentionDays() { - return process.env['GITHUB_RETENTION_DAYS']; -} -exports.getRetentionDays = getRetentionDays; -//# 
sourceMappingURL=config-variables.js.map - -/***/ }), - -/***/ 761: -/***/ (function(module) { +var Inputs; +(function (Inputs) { + Inputs["Name"] = "name"; + Inputs["Path"] = "path"; + Inputs["IfNoFilesFound"] = "if-no-files-found"; + Inputs["RetentionDays"] = "retention-days"; +})(Inputs = exports.Inputs || (exports.Inputs = {})); +var NoFileOptions; +(function (NoFileOptions) { + /** + * Default. Output a warning but do not fail the action + */ + NoFileOptions["warn"] = "warn"; + /** + * Fail the action with an error message + */ + NoFileOptions["error"] = "error"; + /** + * Do not output any warnings or errors, the action does not fail + */ + NoFileOptions["ignore"] = "ignore"; +})(NoFileOptions = exports.NoFileOptions || (exports.NoFileOptions = {})); -module.exports = require("zlib"); /***/ }), -/***/ 799: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 728: +/***/ (function(__unusedmodule, exports) { "use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); -const core = __importStar(__webpack_require__(676)); -const artifact_1 = __webpack_require__(707); -const search_1 = __webpack_require__(923); -const input_helper_1 = __webpack_require__(628); -const constants_1 = __webpack_require__(858); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - const inputs = input_helper_1.getInputs(); - const searchResult = yield search_1.findFilesToUpload(inputs.searchPath); - if (searchResult.filesToUpload.length === 0) { - // No files were found, different use cases warrant different types of behavior if nothing is found - switch (inputs.ifNoFilesFound) { - case constants_1.NoFileOptions.warn: { - core.warning(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); - break; - } - case constants_1.NoFileOptions.error: { - core.setFailed(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); - break; - } - case constants_1.NoFileOptions.ignore: { - core.info(`No files were found with the provided path: ${inputs.searchPath}. No artifacts will be uploaded.`); - break; - } - } - } - else { - const s = searchResult.filesToUpload.length === 1 ? 
'' : 's'; - core.info(`With the provided path, there will be ${searchResult.filesToUpload.length} file${s} uploaded`); - core.debug(`Root artifact directory is ${searchResult.rootDirectory}`); - if (searchResult.filesToUpload.length > 10000) { - core.warning(`There are over 10,000 files in this artifact, consider creating an archive before upload to improve the upload performance.`); - } - const artifactClient = artifact_1.create(); - const options = { - continueOnError: false - }; - if (inputs.retentionDays) { - options.retentionDays = inputs.retentionDays; - } - const uploadResponse = yield artifactClient.uploadArtifact(inputs.artifactName, searchResult.filesToUpload, searchResult.rootDirectory, options); - if (uploadResponse.failedItems.length > 0) { - core.setFailed(`An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.`); - } - else { - core.info(`Artifact ${uploadResponse.artifactName} has been successfully uploaded!`); - } - } - } - catch (err) { - core.setFailed(err.message); - } - }); +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } } -run(); +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map + +/***/ }), +/***/ 747: +/***/ (function(module) { + +module.exports = require("fs"); + +/***/ }), + +/***/ 761: +/***/ (function(module) { + +module.exports = require("zlib"); + +/***/ }), + +/***/ 835: +/***/ (function(module) { + +module.exports = require("url"); /***/ }), -/***/ 800: +/***/ 855: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; @@ -8227,1374 +8288,1313 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge }); }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.UploadHttpClient = void 0; +exports.DownloadHttpClient = void 0; const fs = __importStar(__webpack_require__(747)); -const core = __importStar(__webpack_require__(676)); -const tmp = __importStar(__webpack_require__(719)); -const stream = __importStar(__webpack_require__(413)); -const utils_1 = __webpack_require__(128); -const config_variables_1 = __webpack_require__(750); -const util_1 = __webpack_require__(669); +const core = __importStar(__webpack_require__(470)); +const zlib = __importStar(__webpack_require__(761)); +const utils_1 = __webpack_require__(870); const url_1 = __webpack_require__(835); +const status_reporter_1 = __webpack_require__(176); const perf_hooks_1 = __webpack_require__(630); -const status_reporter_1 = __webpack_require__(722); -const http_client_1 = __webpack_require__(582); -const http_manager_1 = __webpack_require__(851); -const upload_gzip_1 = __webpack_require__(139); -const requestUtils_1 = __webpack_require__(682); -const stat = util_1.promisify(fs.stat); -class UploadHttpClient { +const http_manager_1 = __webpack_require__(452); +const config_variables_1 = __webpack_require__(401); +const requestUtils_1 = __webpack_require__(489); +class DownloadHttpClient { constructor() { - this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); - this.statusReporter = new status_reporter_1.StatusReporter(10000); + this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download'); + // downloads are usually significantly faster than uploads so display status information every second + 
this.statusReporter = new status_reporter_1.StatusReporter(1000); } /** - * Creates a file container for the new artifact in the remote blob storage/file service - * @param {string} artifactName Name of the artifact being created - * @returns The response from the Artifact Service if the file container was successfully created + * Gets a list of all artifacts that are in a specific container */ - createArtifactInFileContainer(artifactName, options) { + listArtifacts() { return __awaiter(this, void 0, void 0, function* () { - const parameters = { - Type: 'actions_storage', - Name: artifactName - }; - // calculate retention period - if (options && options.retentionDays) { - const maxRetentionStr = config_variables_1.getRetentionDays(); - parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr); - } - const data = JSON.stringify(parameters, null, 2); const artifactUrl = utils_1.getArtifactUrl(); // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.uploadHttpManager.getClient(0); - const headers = utils_1.getUploadHeaders('application/json', false); - // Extra information to display when a particular HTTP code is returned - // If a 403 is returned when trying to create a file container, the customer has exceeded - // their storage quota so no new artifact containers can be created - const customErrorMessages = new Map([ - [ - http_client_1.HttpCodes.Forbidden, - 'Artifact storage quota has been hit. Unable to upload any new artifacts' - ], - [ - http_client_1.HttpCodes.BadRequest, - `The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}` - ] - ]); - const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages); + const client = this.downloadHttpManager.getClient(0); + const headers = utils_1.getDownloadHeaders('application/json'); + const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); })); const body = yield response.readBody(); return JSON.parse(body); }); } /** - * Concurrently upload all of the files in chunks - * @param {string} uploadUrl Base Url for the artifact that was created - * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded - * @returns The size of all the files uploaded in bytes + * Fetches a set of container items that describe the contents of an artifact + * @param artifactName the name of the artifact + * @param containerUrl the artifact container URL for the run */ - uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { + getContainerItems(artifactName, containerUrl) { return __awaiter(this, void 0, void 0, function* () { - const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); - const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); - core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); - const parameters = []; - // by default, file uploads will continue if there is an error unless specified differently in the options - let continueOnError = true; - if (options) { - if (options.continueOnError === false) { - continueOnError = false; - } - } - // prepare the necessary parameters to upload all the files - for (const file of filesToUpload) { - const 
resourceUrl = new url_1.URL(uploadUrl); - resourceUrl.searchParams.append('itemPath', file.uploadFilePath); - parameters.push({ - file: file.absoluteFilePath, - resourceUrl: resourceUrl.toString(), - maxChunkSize: MAX_CHUNK_SIZE, - continueOnError - }); - } - const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; - const failedItemsToReport = []; - let currentFile = 0; - let completedFiles = 0; - let uploadFileSize = 0; - let totalFileSize = 0; - let abortPendingFileUploads = false; - this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length); - this.statusReporter.start(); - // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors - yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () { - while (currentFile < filesToUpload.length) { - const currentFileParameters = parameters[currentFile]; - currentFile += 1; - if (abortPendingFileUploads) { - failedItemsToReport.push(currentFileParameters.file); - continue; - } - const startTime = perf_hooks_1.performance.now(); - const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core.isDebug()) { - core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); - } - uploadFileSize += uploadFileResult.successfulUploadSize; - totalFileSize += uploadFileResult.totalSize; - if (uploadFileResult.isSuccess === false) { - failedItemsToReport.push(currentFileParameters.file); - if (!continueOnError) { - // fail fast - core.error(`aborting artifact upload`); - abortPendingFileUploads = true; - } - } - this.statusReporter.incrementProcessedCount(); - } - }))); - this.statusReporter.stop(); - // done uploading, safety dispose all connections - this.uploadHttpManager.disposeAndReplaceAllClients(); - core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); - return { - uploadSize: uploadFileSize, - totalSize: totalFileSize, - failedItems: failedItemsToReport - }; + // the itemPath search parameter controls which containers will be returned + const resourceUrl = new url_1.URL(containerUrl); + resourceUrl.searchParams.append('itemPath', artifactName); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.downloadHttpManager.getClient(0); + const headers = utils_1.getDownloadHeaders('application/json'); + const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); })); + const body = yield response.readBody(); + return JSON.parse(body); }); } /** - * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space. 
- * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls - * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls - * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded - * @returns The size of the file that was uploaded in bytes along with any failed uploads + * Concurrently downloads all the files that are part of an artifact + * @param downloadItems information about what items to download and where to save them */ - uploadFileAsync(httpClientIndex, parameters) { - return __awaiter(this, void 0, void 0, function* () { - const fileStat = yield stat(parameters.file); - const totalFileSize = fileStat.size; - const isFIFO = fileStat.isFIFO(); - let offset = 0; - let isUploadSuccessful = true; - let failedChunkSizes = 0; - let uploadFileSize = 0; - let isGzip = true; - // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O - // for creating a new GZip file, an in-memory buffer is used for compression - // with named pipes the file size is reported as zero in that case don't read the file in memory - if (!isFIFO && totalFileSize < 65536) { - core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); - const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); - // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, - // it will not properly get reset to the start of the stream if a chunk upload needs to be retried - let openUploadStream; - if (totalFileSize < buffer.byteLength) { - // compression did not help with reducing the size, use a readable stream from the original file for upload - core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); - openUploadStream = () => fs.createReadStream(parameters.file); - isGzip = false; - uploadFileSize = totalFileSize; - } - else { - // create a readable stream using a PassThrough stream that is both readable and writable - core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); - openUploadStream = () => { - const passThrough = new stream.PassThrough(); - passThrough.end(buffer); - return passThrough; - }; - uploadFileSize = buffer.byteLength; - } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize); - if (!result) { - // chunk failed to upload - isUploadSuccessful = false; - failedChunkSizes += uploadFileSize; - core.warning(`Aborting upload for ${parameters.file} due to failure`); - } - return { - isSuccess: isUploadSuccessful, - successfulUploadSize: uploadFileSize - failedChunkSizes, - totalSize: totalFileSize - }; - } - else { - // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the - // npm tmp-promise package and this file gets used to create a GZipped file - const tempFile = yield tmp.file(); - core.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); - // create a GZip file of the original file being uploaded, the original file should not be modified in any way - uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); - let uploadFilePath = tempFile.path; - // compression did not help with size reduction, use the original file for upload and delete the temp GZip file - // for named pipes totalFileSize is zero, this assumes compression did help - if (!isFIFO && totalFileSize < uploadFileSize) { - core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); - uploadFileSize = totalFileSize; - uploadFilePath = parameters.file; - isGzip = false; - } - else { - core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); - } - let abortFileUpload = false; - // upload only a single chunk at a time - while (offset < uploadFileSize) { - const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); - const startChunkIndex = offset; - const endChunkIndex = offset + chunkSize - 1; - offset += parameters.maxChunkSize; - if (abortFileUpload) { - // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed - failedChunkSizes += chunkSize; - continue; - } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { - start: startChunkIndex, - end: endChunkIndex, - autoClose: false - }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize); - if (!result) { - // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was - // successfully uploaded so the server may report a different size for what was uploaded - isUploadSuccessful = false; - failedChunkSizes += chunkSize; - core.warning(`Aborting upload for ${parameters.file} due to failure`); - abortFileUpload = true; - } - else { - // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status - if (uploadFileSize > 8388608) { - this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize); - } + downloadSingleArtifact(downloadItems) { + return __awaiter(this, void 0, void 0, function* () { + const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency(); + // limit the number of files downloaded at a single time + core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); + const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; + let currentFile = 0; + let downloadedFiles = 0; + core.info(`Total number of files that will be downloaded: ${downloadItems.length}`); + this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length); + this.statusReporter.start(); + yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () { + while (currentFile < downloadItems.length) { + const currentFileToDownload = downloadItems[currentFile]; + currentFile += 1; + const startTime = perf_hooks_1.performance.now(); + yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath); + if (core.isDebug()) { + core.debug(`File: ${++downloadedFiles}/${downloadItems.length}. 
${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`); } + this.statusReporter.incrementProcessedCount(); } - // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by - // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about - core.debug(`deleting temporary gzip file ${tempFile.path}`); - yield tempFile.cleanup(); - return { - isSuccess: isUploadSuccessful, - successfulUploadSize: uploadFileSize - failedChunkSizes, - totalSize: totalFileSize - }; - } + }))) + .catch(error => { + throw new Error(`Unable to download the artifact: ${error}`); + }) + .finally(() => { + this.statusReporter.stop(); + // safety dispose all connections + this.downloadHttpManager.disposeAndReplaceAllClients(); + }); }); } /** - * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code - * indicates a retryable status, we try to upload the chunk as well - * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls - * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to - * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded - * @param {number} start Starting byte index of file that the chunk belongs to - * @param {number} end Ending byte index of file that the chunk belongs to - * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded - * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream - * @param {number} totalFileSize Original total size of the file that is being uploaded - * @returns if the chunk was successfully uploaded + * Downloads an individual file + * @param httpClientIndex the index of the http client that is used to make all of the calls + * @param artifactLocation origin location where a file will be downloaded from + * @param downloadPath destination location for the file being downloaded */ - uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { + downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) { return __awaiter(this, void 0, void 0, function* () { - // open a new stream and read it to compute the digest - const digest = yield utils_1.digestForStream(openStream()); - // prepare all the necessary headers before making any http call - const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize), digest); - const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - const client = this.uploadHttpManager.getClient(httpClientIndex); - return yield client.sendStream('PUT', resourceUrl, openStream(), headers); - }); let retryCount = 0; const retryLimit = config_variables_1.getRetryLimit(); + let destinationStream = fs.createWriteStream(downloadPath); + const headers = utils_1.getDownloadHeaders('application/json', true, true); + // a single GET request is used to download a file + const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () { + const client = this.downloadHttpManager.getClient(httpClientIndex); + return yield client.get(artifactLocation, headers); + }); + // check the response headers to determine if the file was compressed using gzip + 
const isGzip = (incomingHeaders) => { + return ('content-encoding' in incomingHeaders && + incomingHeaders['content-encoding'] === 'gzip'); + }; // Increments the current retry count and then checks if the retry limit has been reached - // If there have been too many retries, fail so the download stops - const incrementAndCheckRetryLimit = (response) => { + // If there have been too many retries, fail so the download stops. If there is a retryAfterValue value provided, + // it will be used + const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { retryCount++; if (retryCount > retryLimit) { - if (response) { - utils_1.displayHttpDiagnostics(response); + return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`)); + } + else { + this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex); + if (retryAfterValue) { + // Back off by waiting the specified time denoted by the retry-after header + core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); + yield utils_1.sleep(retryAfterValue); } - core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + else { + // Back off using an exponential value that depends on the retry count + const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`); + yield utils_1.sleep(backoffTime); + } + core.info(`Finished backoff for retry #${retryCount}, continuing with download`); + } + }); + const isAllBytesReceived = (expected, received) => { + // be lenient, if any input is missing, assume success, i.e. not truncated + if (!expected || + !received || + process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) { + core.info('Skipping download validation.'); return true; } - return false; + return parseInt(expected) === received; }; - const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { - this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); - if (retryAfterValue) { - core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); - yield utils_1.sleep(retryAfterValue); - } - else { - const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); - core.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); - yield utils_1.sleep(backoffTime); - } - core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); - return; + const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () { + destinationStream.close(); + yield utils_1.rmFile(fileDownloadPath); + destinationStream = fs.createWriteStream(fileDownloadPath); }); - // allow for failed chunks to be retried multiple times + // keep trying to download a file until a retry limit has been reached while (retryCount <= retryLimit) { let response; try { - response = yield uploadChunkRequest(); + response = yield makeDownloadRequest(); } catch (error) { - // if an error is caught, it is usually indicative of a timeout so retry the upload - core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + // if an error is caught, it is usually indicative of a timeout so retry the download + core.info('An error occurred while attempting to download a file'); // eslint-disable-next-line no-console console.log(error); - if (incrementAndCheckRetryLimit()) { - return false; - } + // increment the retryCount and use exponential backoff to wait before making the next request yield backOff(); continue; } - // Always read the body of the response. There is potential for a resource leak if the body is not read which will - // result in the connection remaining open along with unintended consequences when trying to dispose of the client - yield response.readBody(); + let forceRetry = false; if (utils_1.isSuccessStatusCode(response.message.statusCode)) { - return true; - } - else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { - core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); - if (incrementAndCheckRetryLimit(response)) { - return false; - } - utils_1.isThrottledStatusCode(response.message.statusCode) - ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) - : yield backOff(); - } - else { - core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); - utils_1.displayHttpDiagnostics(response); - return false; - } - } - return false; - }); - } - /** - * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. 
- * Updating the size indicates that we are done uploading all the contents of the artifact - */ - patchArtifactSize(size, artifactName) { - return __awaiter(this, void 0, void 0, function* () { - const resourceUrl = new url_1.URL(utils_1.getArtifactUrl()); - resourceUrl.searchParams.append('artifactName', artifactName); - const parameters = { Size: size }; - const data = JSON.stringify(parameters, null, 2); - core.debug(`URL is ${resourceUrl.toString()}`); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.uploadHttpManager.getClient(0); - const headers = utils_1.getUploadHeaders('application/json', false); - // Extra information to display when a particular HTTP code is returned - const customErrorMessages = new Map([ - [ - http_client_1.HttpCodes.NotFound, - `An Artifact with the name ${artifactName} was not found` - ] - ]); - // TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this - const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); - yield response.readBody(); - core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); - }); - } -} -exports.UploadHttpClient = UploadHttpClient; -//# sourceMappingURL=upload-http-client.js.map - -/***/ }), - -/***/ 835: -/***/ (function(module) { - -module.exports = require("url"); - -/***/ }), - -/***/ 851: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -exports.HttpManager = void 0; -const utils_1 = __webpack_require__(128); -/** - * Used for managing http clients during either upload or download - */ -class HttpManager { - constructor(clientCount, userAgent) { - if (clientCount < 1) { - throw new Error('There must be at least one client'); - } - this.userAgent = userAgent; - this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); - } - getClient(index) { - return this.clients[index]; - } - // client disposal is necessary if a keep-alive connection is used to properly close the connection - // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 - disposeAndReplaceClient(index) { - this.clients[index].dispose(); - this.clients[index] = utils_1.createHttpClient(this.userAgent); - } - disposeAndReplaceAllClients() { - for (const [index] of this.clients.entries()) { - this.disposeAndReplaceClient(index); - } - } -} -exports.HttpManager = HttpManager; -//# sourceMappingURL=http-manager.js.map - -/***/ }), - -/***/ 857: -/***/ (function(module) { - -module.exports = function (xs, fn) { - var res = []; - for (var i = 0; i < xs.length; i++) { - var x = fn(xs[i], i); - if (isArray(x)) res.push.apply(res, x); - else res.push(x); + // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string + // which can cause some gzip encoded data to be lost + // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents + try { + const isGzipped = isGzip(response.message.headers); + yield this.pipeResponseToFile(response, destinationStream, isGzipped); + if 
(isGzipped || + isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) { + return; + } + else { + forceRetry = true; + } + } + catch (error) { + // retry on error, most likely streams were corrupted + forceRetry = true; + } + } + if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) { + core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); + resetDestinationStream(downloadPath); + // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff + utils_1.isThrottledStatusCode(response.message.statusCode) + ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + : yield backOff(); + } + else { + // Some unexpected response code, fail immediately and stop the download + utils_1.displayHttpDiagnostics(response); + return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`)); + } + } + }); } - return res; -}; - -var isArray = Array.isArray || function (xs) { - return Object.prototype.toString.call(xs) === '[object Array]'; -}; - - -/***/ }), - -/***/ 858: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -var Inputs; -(function (Inputs) { - Inputs["Name"] = "name"; - Inputs["Path"] = "path"; - Inputs["IfNoFilesFound"] = "if-no-files-found"; - Inputs["RetentionDays"] = "retention-days"; -})(Inputs = exports.Inputs || (exports.Inputs = {})); -var NoFileOptions; -(function (NoFileOptions) { - /** - * Default. Output a warning but do not fail the action - */ - NoFileOptions["warn"] = "warn"; - /** - * Fail the action with an error message - */ - NoFileOptions["error"] = "error"; /** - * Do not output any warnings or errors, the action does not fail + * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary + * @param response the http response received when downloading a file + * @param destinationStream the stream where the file should be written to + * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it */ - NoFileOptions["ignore"] = "ignore"; -})(NoFileOptions = exports.NoFileOptions || (exports.NoFileOptions = {})); - - -/***/ }), - -/***/ 898: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __webpack_require__(676); -/** - * Returns a copy with defaults filled in. 
- */ -function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - omitBrokenSymbolicLinks: true - }; - if (copy) { - if (typeof copy.followSymbolicLinks === 'boolean') { - result.followSymbolicLinks = copy.followSymbolicLinks; - core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === 'boolean') { - result.implicitDescendants = copy.implicitDescendants; - core.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); - } - } - return result; -} -exports.getOptions = getOptions; -//# sourceMappingURL=internal-glob-options-helper.js.map - -/***/ }), - -/***/ 909: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = realpath -realpath.realpath = realpath -realpath.sync = realpathSync -realpath.realpathSync = realpathSync -realpath.monkeypatch = monkeypatch -realpath.unmonkeypatch = unmonkeypatch - -var fs = __webpack_require__(747) -var origRealpath = fs.realpath -var origRealpathSync = fs.realpathSync - -var version = process.version -var ok = /^v[0-5]\./.test(version) -var old = __webpack_require__(313) - -function newError (er) { - return er && er.syscall === 'realpath' && ( - er.code === 'ELOOP' || - er.code === 'ENOMEM' || - er.code === 'ENAMETOOLONG' - ) -} - -function realpath (p, cache, cb) { - if (ok) { - return origRealpath(p, cache, cb) - } - - if (typeof cache === 'function') { - cb = cache - cache = null - } - origRealpath(p, cache, function (er, result) { - if (newError(er)) { - old.realpath(p, cache, cb) - } else { - cb(er, result) - } - }) -} - -function realpathSync (p, cache) { - if (ok) { - return origRealpathSync(p, cache) - } - - try { - return origRealpathSync(p, cache) - } catch (er) { - if (newError(er)) { - return old.realpathSync(p, cache) - } else { - throw er + pipeResponseToFile(response, destinationStream, isGzip) { + return __awaiter(this, void 0, void 0, function* () { + yield new Promise((resolve, reject) => { + if (isGzip) { + const gunzip = zlib.createGunzip(); + response.message + .on('error', error => { + core.error(`An error occurred while attempting to read the response stream`); + gunzip.close(); + destinationStream.close(); + reject(error); + }) + .pipe(gunzip) + .on('error', error => { + core.error(`An error occurred while attempting to decompress the response stream`); + destinationStream.close(); + reject(error); + }) + .pipe(destinationStream) + .on('close', () => { + resolve(); + }) + .on('error', error => { + core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + reject(error); + }); + } + else { + response.message + .on('error', error => { + core.error(`An error occurred while attempting to read the response stream`); + destinationStream.close(); + reject(error); + }) + .pipe(destinationStream) + .on('close', () => { + resolve(); + }) + .on('error', error => { + core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + reject(error); + }); + } + }); + return; + }); } - } -} - -function monkeypatch () { - fs.realpath = realpath - fs.realpathSync = realpathSync -} - -function unmonkeypatch () { - fs.realpath = origRealpath - fs.realpathSync = origRealpathSync } - +exports.DownloadHttpClient = DownloadHttpClient; +//# 
sourceMappingURL=download-http-client.js.map /***/ }), -/***/ 913: +/***/ 856: /***/ (function(__unusedmodule, exports, __webpack_require__) { -"use strict"; - - -var net = __webpack_require__(631); -var tls = __webpack_require__(16); -var http = __webpack_require__(605); -var https = __webpack_require__(211); -var events = __webpack_require__(614); -var assert = __webpack_require__(357); -var util = __webpack_require__(669); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} +exports.alphasort = alphasort +exports.alphasorti = alphasorti +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) } -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; +var path = __webpack_require__(622) +var minimatch = __webpack_require__(93) +var isAbsolute = __webpack_require__(681) +var Minimatch = minimatch.Minimatch + +function alphasorti (a, b) { + return a.toLowerCase().localeCompare(b.toLowerCase()) } -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; +function alphasort (a, b) { + return a.localeCompare(b) } +function setupIgnores (self, options) { + self.ignore = options.ignore || [] -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } } -util.inherits(TunnelingAgent, events.EventEmitter); -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. 
- self.requests.push(options); - return; + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher } +} - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); +function setopts (self, pattern, options) { + if (!options) + options = {} - function onFree() { - self.emit('free', socket, options); + // base-matching: just use globstar for that. + if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") } + pattern = "**/" + pattern + } - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd } - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? 
self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. + options.nonegate = true + options.nocomment = true + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } } - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); + if (!nou) + all = Object.keys(all) - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + if (!self.nosort) + all = all.sort(self.nocase ? alphasorti : alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); } - function onError(cause) { - connectReq.removeAllListeners(); + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } } -}; -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); + return m +} - 
var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); +// lotta situps... +function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) } -}; -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); + return abs } -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } - } - } - return target; + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) } +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) } -exports.debug = debug; // for test /***/ }), -/***/ 917: -/***/ (function(module) { +/***/ 870: +/***/ (function(__unusedmodule, exports, __webpack_require__) { -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.digestForStream = exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0; +const crypto_1 = __importDefault(__webpack_require__(417)); +const fs_1 = __webpack_require__(747); +const core_1 = __webpack_require__(470); +const http_client_1 = __webpack_require__(425); +const auth_1 = __webpack_require__(554); +const config_variables_1 = __webpack_require__(401); +const crc64_1 = __importDefault(__webpack_require__(112)); +/** + * Returns a retry time in milliseconds that exponentially gets larger + * depending on the amount of retries that have been attempted + */ +function getExponentialRetryTimeInMilliseconds(retryCount) { + if (retryCount < 0) { + throw new Error('RetryCount should not be negative'); + } + else if (retryCount === 0) { + return config_variables_1.getInitialRetryIntervalInMilliseconds(); + } + const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount; + const maxTime = minTime * config_variables_1.getRetryMultiplier(); + // returns a random number between the minTime (inclusive) and the maxTime (exclusive) + return Math.trunc(Math.random() * (maxTime - minTime) + minTime); +} +exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds; +/** + * Parses a env variable that is a number + */ +function parseEnvNumber(key) { + const value = Number(process.env[key]); + if (Number.isNaN(value) || value < 0) { + return undefined; + } + return value; +} +exports.parseEnvNumber = parseEnvNumber; +/** + * Various utility functions to help with the necessary API calls + */ +function getApiVersion() { + return '6.0-preview'; +} +exports.getApiVersion = getApiVersion; +function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; +} +exports.isSuccessStatusCode = isSuccessStatusCode; +function isForbiddenStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode === http_client_1.HttpCodes.Forbidden; +} +exports.isForbiddenStatusCode = isForbiddenStatusCode; +function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + 
http_client_1.HttpCodes.GatewayTimeout, + http_client_1.HttpCodes.InternalServerError, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.TooManyRequests, + 413 // Payload Too Large + ]; + return retryableStatusCodes.includes(statusCode); +} +exports.isRetryableStatusCode = isRetryableStatusCode; +function isThrottledStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode === http_client_1.HttpCodes.TooManyRequests; +} +exports.isThrottledStatusCode = isThrottledStatusCode; +/** + * Attempts to get the retry-after value from a set of http headers. The retry time + * is originally denoted in seconds, so if present, it is converted to milliseconds + * @param headers all the headers received when making an http call + */ +function tryGetRetryAfterValueTimeInMilliseconds(headers) { + if (headers['retry-after']) { + const retryTime = Number(headers['retry-after']); + if (!isNaN(retryTime)) { + core_1.info(`Retry-After header is present with a value of ${retryTime}`); + return retryTime * 1000; } - }) + core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`); + return undefined; + } + core_1.info(`No retry-after header was found. Dumping all headers for diagnostic purposes`); + // eslint-disable-next-line no-console + console.log(headers); + return undefined; +} +exports.tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds; +function getContentRange(start, end, total) { + // Format: `bytes start-end/fileSize + // start and end are inclusive + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/200 + return `bytes ${start}-${end}/${total}`; +} +exports.getContentRange = getContentRange; +/** + * Sets all the necessary headers when downloading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} acceptGzip can we accept a gzip encoded response + * @param {string} acceptType the type of content that we can accept + * @returns appropriate headers to make a specific http call during artifact download + */ +function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) { + const requestOptions = {}; + if (contentType) { + requestOptions['Content-Type'] = contentType; } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor + if (isKeepAlive) { + requestOptions['Connection'] = 'Keep-Alive'; + // keep alive for at least 10 seconds before closing the connection + requestOptions['Keep-Alive'] = '10'; } - } -} - - -/***/ }), - -/***/ 919: -/***/ (function(module, __unusedexports, __webpack_require__) { - -try { - var util = __webpack_require__(669); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __webpack_require__(917); -} - - -/***/ }), - -/***/ 923: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const glob = __importStar(__webpack_require__(166)); -const path = __importStar(__webpack_require__(622)); -const core_1 = __webpack_require__(676); -const fs_1 = __webpack_require__(747); -const path_1 = __webpack_require__(622); -const util_1 = __webpack_require__(669); -const stats = util_1.promisify(fs_1.stat); -function getDefaultGlobOptions() { - return { - followSymbolicLinks: true, - implicitDescendants: true, - omitBrokenSymbolicLinks: true - }; + if (acceptGzip) { + // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header + requestOptions['Accept-Encoding'] = 'gzip'; + requestOptions['Accept'] = `application/octet-stream;api-version=${getApiVersion()}`; + } + else { + // default to application/json if we are not working with gzip content + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; + } + return requestOptions; } +exports.getDownloadHeaders = getDownloadHeaders; /** - * If multiple paths are specific, the least common ancestor (LCA) of the search paths is used as - * the delimiter to control the directory structure for the artifact. 
This function returns the LCA - * when given an array of search paths - * - * Example 1: The patterns `/foo/` and `/bar/` returns `/` - * - * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo` + * Sets all the necessary headers when uploading an artifact + * @param {string} contentType the type of content being uploaded + * @param {boolean} isKeepAlive is the same connection being used to make multiple calls + * @param {boolean} isGzip is the connection being used to upload GZip compressed content + * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed + * @param {number} contentLength the length of the content that is being uploaded + * @param {string} contentRange the range of the content that is being uploaded + * @returns appropriate headers to make a specific http call during artifact upload */ -function getMultiPathLCA(searchPaths) { - if (searchPaths.length < 2) { - throw new Error('At least two search paths must be provided'); +function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange, digest) { + const requestOptions = {}; + requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`; + if (contentType) { + requestOptions['Content-Type'] = contentType; } - const commonPaths = new Array(); - const splitPaths = new Array(); - let smallestPathLength = Number.MAX_SAFE_INTEGER; - // split each of the search paths using the platform specific separator - for (const searchPath of searchPaths) { - core_1.debug(`Using search path ${searchPath}`); - const splitSearchPath = path.normalize(searchPath).split(path.sep); - // keep track of the smallest path length so that we don't accidentally later go out of bounds - smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length); - splitPaths.push(splitSearchPath); + if (isKeepAlive) { + requestOptions['Connection'] = 'Keep-Alive'; + // keep alive for at least 10 seconds before closing the connection + requestOptions['Keep-Alive'] = '10'; } - // on Unix-like file systems, the file separator exists at the beginning of the file path, make sure to preserve it - if (searchPaths[0].startsWith(path.sep)) { - commonPaths.push(path.sep); + if (isGzip) { + requestOptions['Content-Encoding'] = 'gzip'; + requestOptions['x-tfs-filelength'] = uncompressedLength; } - let splitIndex = 0; - // function to check if the paths are the same at a specific index - function isPathTheSame() { - const compare = splitPaths[0][splitIndex]; - for (let i = 1; i < splitPaths.length; i++) { - if (compare !== splitPaths[i][splitIndex]) { - // a non-common index has been reached - return false; - } - } - return true; + if (contentLength) { + requestOptions['Content-Length'] = contentLength; } - // loop over all the search paths until there is a non-common ancestor or we go out of bounds - while (splitIndex < smallestPathLength) { - if (!isPathTheSame()) { - break; - } - // if all are the same, add to the end result & increment the index - commonPaths.push(splitPaths[0][splitIndex]); - splitIndex++; + if (contentRange) { + requestOptions['Content-Range'] = contentRange; } - return path.join(...commonPaths); + if (digest) { + requestOptions['x-actions-results-crc64'] = digest.crc64; + requestOptions['x-actions-results-md5'] = digest.md5; + } + return requestOptions; } -function findFilesToUpload(searchPath, globOptions) { +exports.getUploadHeaders = getUploadHeaders; +function 
createHttpClient(userAgent) { + return new http_client_1.HttpClient(userAgent, [ + new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken()) + ]); +} +exports.createHttpClient = createHttpClient; +function getArtifactUrl() { + const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`; + core_1.debug(`Artifact Url: ${artifactUrl}`); + return artifactUrl; +} +exports.getArtifactUrl = getArtifactUrl; +/** + * Uh oh! Something might have gone wrong during either upload or download. The IHtttpClientResponse object contains information + * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but + * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only + * the information that we want. + * + * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided. + * Other information such as the headers, the response code and message might be useful, so this is displayed. + */ +function displayHttpDiagnostics(response) { + core_1.info(`##### Begin Diagnostic HTTP information ##### +Status Code: ${response.message.statusCode} +Status Message: ${response.message.statusMessage} +Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} +###### End Diagnostic HTTP information ######`); +} +exports.displayHttpDiagnostics = displayHttpDiagnostics; +function createDirectoriesForArtifact(directories) { return __awaiter(this, void 0, void 0, function* () { - const searchResults = []; - const globber = yield glob.create(searchPath, globOptions || getDefaultGlobOptions()); - const rawSearchResults = yield globber.glob(); - /* - Files are saved with case insensitivity. Uploading both a.txt and A.txt will files to be overwritten - Detect any files that could be overwritten for user awareness - */ - const set = new Set(); - /* - Directories will be rejected if attempted to be uploaded. This includes just empty - directories so filter any directories out from the raw search results - */ - for (const searchResult of rawSearchResults) { - const fileStats = yield stats(searchResult); - // isDirectory() returns false for symlinks if using fs.lstat(), make sure to use fs.stat() instead - if (!fileStats.isDirectory()) { - core_1.debug(`File:${searchResult} was found using the provided searchPath`); - searchResults.push(searchResult); - // detect any files that would be overwritten because of case insensitivity - if (set.has(searchResult.toLowerCase())) { - core_1.info(`Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path`); - } - else { - set.add(searchResult.toLowerCase()); - } - } - else { - core_1.debug(`Removing ${searchResult} from rawSearchResults because it is a directory`); - } + for (const directory of directories) { + yield fs_1.promises.mkdir(directory, { + recursive: true + }); } - // Calculate the root directory for the artifact using the search paths that were utilized - const searchPaths = globber.getSearchPaths(); - if (searchPaths.length > 1) { - core_1.info(`Multiple search paths detected. Calculating the least common ancestor of all paths`); - const lcaSearchPath = getMultiPathLCA(searchPaths); - core_1.info(`The least common ancestor is ${lcaSearchPath}. 
This will be the root directory of the artifact`); - return { - filesToUpload: searchResults, - rootDirectory: lcaSearchPath - }; + }); +} +exports.createDirectoriesForArtifact = createDirectoriesForArtifact; +function createEmptyFilesForArtifact(emptyFilesToCreate) { + return __awaiter(this, void 0, void 0, function* () { + for (const filePath of emptyFilesToCreate) { + yield (yield fs_1.promises.open(filePath, 'w')).close(); } - /* - Special case for a single file artifact that is uploaded without a directory or wildcard pattern. The directory structure is - not preserved and the root directory will be the single files parent directory - */ - if (searchResults.length === 1 && searchPaths[0] === searchResults[0]) { - return { - filesToUpload: searchResults, - rootDirectory: path_1.dirname(searchResults[0]) - }; + }); +} +exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact; +function getFileSize(filePath) { + return __awaiter(this, void 0, void 0, function* () { + const stats = yield fs_1.promises.stat(filePath); + core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`); + return stats.size; + }); +} +exports.getFileSize = getFileSize; +function rmFile(filePath) { + return __awaiter(this, void 0, void 0, function* () { + yield fs_1.promises.unlink(filePath); + }); +} +exports.rmFile = rmFile; +function getProperRetention(retentionInput, retentionSetting) { + if (retentionInput < 0) { + throw new Error('Invalid retention, minimum value is 1.'); + } + let retention = retentionInput; + if (retentionSetting) { + const maxRetention = parseInt(retentionSetting); + if (!isNaN(maxRetention) && maxRetention < retention) { + core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`); + retention = maxRetention; } - return { - filesToUpload: searchResults, - rootDirectory: searchPaths[0] - }; + } + return retention; +} +exports.getProperRetention = getProperRetention; +function sleep(milliseconds) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => setTimeout(resolve, milliseconds)); }); } -exports.findFilesToUpload = findFilesToUpload; +exports.sleep = sleep; +function digestForStream(stream) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + const crc64 = new crc64_1.default(); + const md5 = crypto_1.default.createHash('md5'); + stream + .on('data', data => { + crc64.update(data); + md5.update(data); + }) + .on('end', () => resolve({ + crc64: crc64.digest('base64'), + md5: md5.digest('base64') + })) + .on('error', reject); + }); + }); +} +exports.digestForStream = digestForStream; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 875: +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const { promisify } = __webpack_require__(669); +const tmp = __webpack_require__(150); + +// file +module.exports.fileSync = tmp.fileSync; +const fileWithOptions = promisify((options, cb) => + tmp.file(options, (err, path, fd, cleanup) => + err ? 
cb(err) : cb(undefined, { path, fd, cleanup: promisify(cleanup) }) + ) +); +module.exports.file = async (options) => fileWithOptions(options); + +module.exports.withFile = async function withFile(fn, options) { + const { path, fd, cleanup } = await module.exports.file(options); + try { + return await fn({ path, fd }); + } finally { + await cleanup(); + } +}; + + +// directory +module.exports.dirSync = tmp.dirSync; +const dirWithOptions = promisify((options, cb) => + tmp.dir(options, (err, path, cleanup) => + err ? cb(err) : cb(undefined, { path, cleanup: promisify(cleanup) }) + ) +); +module.exports.dir = async (options) => dirWithOptions(options); + +module.exports.withDir = async function withDir(fn, options) { + const { path, cleanup } = await module.exports.dir(options); + try { + return await fn({ path }); + } finally { + await cleanup(); + } +}; + + +// name generation +module.exports.tmpNameSync = tmp.tmpNameSync; +module.exports.tmpName = promisify(tmp.tmpName); + +module.exports.tmpdir = tmp.tmpdir; + +module.exports.setGracefulCleanup = tmp.setGracefulCleanup; + + +/***/ }), + +/***/ 896: +/***/ (function(module) { + +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; /***/ }), -/***/ 936: +/***/ 923: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; Object.defineProperty(exports, "__esModule", { value: true }); -const os = __importStar(__webpack_require__(87)); -const utils_1 = __webpack_require__(615); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; +const assert = __webpack_require__(357); +const os = __webpack_require__(87); +const path = __webpack_require__(622); +const pathHelper = __webpack_require__(972); +const minimatch_1 = __webpack_require__(93); +const internal_match_kind_1 = __webpack_require__(327); +const internal_path_1 = __webpack_require__(383); +const IS_WINDOWS = process.platform === 'win32'; +class Pattern { + constructor(patternOrNegate, segments) { + /** + * Indicates whether matches should be excluded from the result set + */ + this.negate = false; + // Pattern overload + let pattern; + if (typeof patternOrNegate === 'string') { + pattern = patternOrNegate.trim(); } - this.command = command; - this.properties = properties; - this.message = message; + // Segments overload + else { + // Convert to pattern + segments = segments || []; + assert(segments.length, `Parameter 'segments' must not empty`); + const root = 
Pattern.getLiteral(segments[0]); + assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); + pattern = new internal_path_1.Path(segments).toString().trim(); + if (patternOrNegate) { + pattern = `!${pattern}`; + } + } + // Negate + while (pattern.startsWith('!')) { + this.negate = !this.negate; + pattern = pattern.substr(1).trim(); + } + // Normalize slashes and ensures absolute root + pattern = Pattern.fixupPattern(pattern); + // Segments + this.segments = new internal_path_1.Path(pattern).segments; + // Trailing slash indicates the pattern should only match directories, not regular files + this.trailingSeparator = pathHelper + .normalizeSeparators(pattern) + .endsWith(path.sep); + pattern = pathHelper.safeTrimTrailingSeparator(pattern); + // Search path (literal path prior to the first glob segment) + let foundGlob = false; + const searchSegments = this.segments + .map(x => Pattern.getLiteral(x)) + .filter(x => !foundGlob && !(foundGlob = x === '')); + this.searchPath = new internal_path_1.Path(searchSegments).toString(); + // Root RegExp (required when determining partial match) + this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); + // Create minimatch + const minimatchOptions = { + dot: true, + nobrace: true, + nocase: IS_WINDOWS, + nocomment: true, + noext: true, + nonegate: true + }; + pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; + this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } + /** + * Matches the pattern against the specified path + */ + match(itemPath) { + // Last segment is globstar? + if (this.segments[this.segments.length - 1] === '**') { + // Normalize slashes + itemPath = pathHelper.normalizeSeparators(itemPath); + // Append a trailing slash. Otherwise Minimatch will not match the directory immediately + // preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns + // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. + if (!itemPath.endsWith(path.sep)) { + // Note, this is safe because the constructor ensures the pattern has an absolute root. + // For example, formats like C: and C:foo on Windows are resolved to an aboslute root. + itemPath = `${itemPath}${path.sep}`; } } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; + else { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + } + // Match + if (this.minimatch.match(itemPath)) { + return this.trailingSeparator ? 
internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; + } + return internal_match_kind_1.MatchKind.None; } -} -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), - -/***/ 941: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } -var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __webpack_require__(676); -const fs = __webpack_require__(747); -const globOptionsHelper = __webpack_require__(898); -const path = __webpack_require__(622); -const patternHelper = __webpack_require__(365); -const internal_match_kind_1 = __webpack_require__(432); -const internal_pattern_1 = __webpack_require__(609); -const internal_search_state_1 = __webpack_require__(242); -const IS_WINDOWS = process.platform === 'win32'; -class DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); + /** + * Indicates whether the pattern may match descendants of the specified path + */ + partialMatch(itemPath) { + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // matchOne does not handle root path correctly + if (pathHelper.dirname(itemPath) === itemPath) { + return this.rootRegExp.test(itemPath); + } + return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); } - getSearchPaths() { - // Return a copy - return this.searchPaths.slice(); + /** + * Escapes glob patterns within a path + */ + static globEscape(s) { + return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS + .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment + .replace(/\?/g, '[?]') // escape '?' + .replace(/\*/g, '[*]'); // escape '*' } - glob() { - var e_1, _a; - return __awaiter(this, void 0, void 0, function* () { - const result = []; - try { - for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { - const itemPath = _c.value; - result.push(itemPath); - } + /** + * Normalizes slashes and ensures absolute root + */ + static fixupPattern(pattern) { + // Empty + assert(pattern, 'pattern cannot be empty'); + // Must not contain `.` segment, unless first segment + // Must not contain `..` segment + const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); + assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); + // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r + assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); + // Normalize slashes + pattern = pathHelper.normalizeSeparators(pattern); + // Replace leading `.` segment + if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { + pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); + } + // Replace leading `~` segment + else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { + const homedir = os.homedir(); + assert(homedir, 'Unable to determine HOME directory'); + assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); + pattern = Pattern.globEscape(homedir) + pattern.substr(1); + } + // Replace relative drive root, e.g. 
pattern is C: or C:foo + else if (IS_WINDOWS && + (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); + if (pattern.length > 2 && !root.endsWith('\\')) { + root += '\\'; } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); - } - finally { if (e_1) throw e_1.error; } + pattern = Pattern.globEscape(root) + pattern.substr(2); + } + // Replace relative root, e.g. pattern is \ or \foo + else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { + let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); + if (!root.endsWith('\\')) { + root += '\\'; } - return result; - }); + pattern = Pattern.globEscape(root) + pattern.substr(1); + } + // Otherwise ensure absolute root + else { + pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); + } + return pathHelper.normalizeSeparators(pattern); } - globGenerator() { - return __asyncGenerator(this, arguments, function* globGenerator_1() { - // Fill in defaults options - const options = globOptionsHelper.getOptions(this.options); - // Implicit descendants? - const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && - (pattern.trailingSeparator || - pattern.segments[pattern.segments.length - 1] !== '**')) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**'))); - } + /** + * Attempts to unescape a pattern segment to create a literal path segment. + * Otherwise returns empty string. + */ + static getLiteral(segment) { + let literal = ''; + for (let i = 0; i < segment.length; i++) { + const c = segment[i]; + // Escape + if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { + literal += segment[++i]; + continue; } - // Push the search paths - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core.debug(`Search path '${searchPath}'`); - // Exists? - try { - // Intentionally using lstat. Detection for broken symlink - // will be performed later (if following symlinks). - yield __await(fs.promises.lstat(searchPath)); - } - catch (err) { - if (err.code === 'ENOENT') { + // Wildcard + else if (c === '*' || c === '?') { + return ''; + } + // Character set + else if (c === '[' && i + 1 < segment.length) { + let set = ''; + let closed = -1; + for (let i2 = i + 1; i2 < segment.length; i2++) { + const c2 = segment[i2]; + // Escape + if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { + set += segment[++i2]; continue; } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); - } - // Search - const traversalChain = []; // used to detect cycles - while (stack.length) { - // Pop - const item = stack.pop(); - // Match? 
- const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - // Stat - const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - // Broken symlink, or symlink cycle detected, or no longer exists - if (!stats) { - continue; - } - // Directory - if (stats.isDirectory()) { - // Matched - if (match & internal_match_kind_1.MatchKind.Directory) { - yield yield __await(item.path); + // Closed + else if (c2 === ']') { + closed = i2; + break; } - // Descend? - else if (!partialMatch) { - continue; + // Otherwise + else { + set += c2; } - // Push the child items in reverse - const childLevel = item.level + 1; - const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); } - // File - else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await(item.path); + // Closed? + if (closed >= 0) { + // Cannot convert + if (set.length > 1) { + return ''; + } + // Convert to literal + if (set) { + literal += set; + i = closed; + continue; + } } + // Otherwise fall thru } - }); + // Append + literal += c; + } + return literal; } /** - * Constructs a DefaultGlobber + * Escapes regexp special characters + * https://javascript.info/regexp-escaping */ - static create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - const result = new DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, '\n'); - patterns = patterns.replace(/\r/g, '\n'); - } - const lines = patterns.split('\n').map(x => x.trim()); - for (const line of lines) { - // Empty or comment - if (!line || line.startsWith('#')) { - continue; - } - // Pattern - else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; - }); + static regExpEscape(s) { + return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); } - static stat(item, options, traversalChain) { - return __awaiter(this, void 0, void 0, function* () { - // Note: - // `stat` returns info about the target of a symlink (or symlink chain) - // `lstat` returns info about a symlink itself - let stats; - if (options.followSymbolicLinks) { - try { - // Use `stat` (following symlinks) - stats = yield fs.promises.stat(item.path); +} +exports.Pattern = Pattern; +//# sourceMappingURL=internal-pattern.js.map + +/***/ }), + +/***/ 972: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const assert = __webpack_require__(357); +const path = __webpack_require__(622); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. + * + * For example, on Linux/macOS: + * - `/ => /` + * - `/hello => /` + * + * For example, on Windows: + * - `C:\ => C:\` + * - `C:\hello => C:\` + * - `C: => C:` + * - `C:hello => C:` + * - `\ => \` + * - `\hello => \` + * - `\\hello => \\hello` + * - `\\hello\world => \\hello\world` + */ +function dirname(p) { + // Normalize slashes and trim unnecessary trailing slash + p = safeTrimTrailingSeparator(p); + // Windows UNC root, e.g. 
\\hello or \\hello\world + if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { + return p; + } + // Get dirname + let result = path.dirname(p); + // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ + if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { + result = safeTrimTrailingSeparator(result); + } + return result; +} +exports.dirname = dirname; +/** + * Roots the path if not already rooted. On Windows, relative roots like `\` + * or `C:` are expanded based on the current working directory. + */ +function ensureAbsoluteRoot(root, itemPath) { + assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); + assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); + // Already rooted + if (hasAbsoluteRoot(itemPath)) { + return itemPath; + } + // Windows + if (IS_WINDOWS) { + // Check for itemPath like C: or C:foo + if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { + let cwd = process.cwd(); + assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + // Drive letter matches cwd? Expand to cwd + if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { + // Drive only, e.g. C: + if (itemPath.length === 2) { + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}`; } - catch (err) { - if (err.code === 'ENOENT') { - if (options.omitBrokenSymbolicLinks) { - core.debug(`Broken symlink '${item.path}'`); - return undefined; - } - throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); + // Drive + path, e.g. C:foo + else { + if (!cwd.endsWith('\\')) { + cwd += '\\'; } - throw err; + // Preserve specified drive letter case (upper or lower) + return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; } } + // Different drive else { - // Use `lstat` (not following symlinks) - stats = yield fs.promises.lstat(item.path); - } - // Note, isDirectory() returns false for the lstat of a symlink - if (stats.isDirectory() && options.followSymbolicLinks) { - // Get the realpath - const realPath = yield fs.promises.realpath(item.path); - // Fixup the traversal chain to match the item level - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - // Test for a cycle - if (traversalChain.some((x) => x === realPath)) { - core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return undefined; - } - // Update the traversal chain - traversalChain.push(realPath); + return `${itemPath[0]}:\\${itemPath.substr(2)}`; } - return stats; - }); + } + // Check for itemPath like \ or \foo + else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { + const cwd = process.cwd(); + assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); + return `${cwd[0]}:\\${itemPath.substr(1)}`; + } + } + assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); + // Otherwise ensure root ends with a separator + if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { + // Intentionally empty + } + else { + // Append separator + root += path.sep; } + return root + itemPath; } -exports.DefaultGlobber = DefaultGlobber; -//# sourceMappingURL=internal-globber.js.map +exports.ensureAbsoluteRoot = ensureAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. 
On Windows, true for paths like: + * `\\hello\share` and `C:\hello` (and using alternate separator). + */ +function hasAbsoluteRoot(itemPath) { + assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \\hello\share or C:\hello + return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasAbsoluteRoot = hasAbsoluteRoot; +/** + * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: + * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). + */ +function hasRoot(itemPath) { + assert(itemPath, `isRooted parameter 'itemPath' must not be empty`); + // Normalize separators + itemPath = normalizeSeparators(itemPath); + // Windows + if (IS_WINDOWS) { + // E.g. \ or \hello or \\hello + // E.g. C: or C:\hello + return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); + } + // E.g. /hello + return itemPath.startsWith('/'); +} +exports.hasRoot = hasRoot; +/** + * Removes redundant slashes and converts `/` to `\` on Windows + */ +function normalizeSeparators(p) { + p = p || ''; + // Windows + if (IS_WINDOWS) { + // Convert slashes on Windows + p = p.replace(/\//g, '\\'); + // Remove redundant slashes + const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello + return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC + } + // Remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +exports.normalizeSeparators = normalizeSeparators; +/** + * Normalizes the path separators and trims the trailing separator (when safe). + * For example, `/foo/ => /foo` but `/ => /` + */ +function safeTrimTrailingSeparator(p) { + // Short-circuit if empty + if (!p) { + return ''; + } + // Normalize separators + p = normalizeSeparators(p); + // No trailing slash + if (!p.endsWith(path.sep)) { + return p; + } + // Check '/' on Linux/macOS and '\' on Windows + if (p === path.sep) { + return p; + } + // On Windows check if drive root. E.g. C:\ + if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { + return p; + } + // Otherwise trim trailing slash + return p.substr(0, p.length - 1); +} +exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; +//# sourceMappingURL=internal-path-helper.js.map + +/***/ }), + +/***/ 988: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(141); + /***/ })