diff --git a/.licenses/npm/@actions/artifact.dep.yml b/.licenses/npm/@actions/artifact.dep.yml index c54e649a..5e24feba 100644 --- a/.licenses/npm/@actions/artifact.dep.yml +++ b/.licenses/npm/@actions/artifact.dep.yml @@ -1,6 +1,6 @@ --- name: "@actions/artifact" -version: 0.5.2 +version: 0.6.0 type: npm summary: homepage: diff --git a/dist/index.js b/dist/index.js index 1837061d..34456272 100644 --- a/dist/index.js +++ b/dist/index.js @@ -83,309 +83,676 @@ function wrappy (fn, cb) { /***/ }), -/***/ 16: -/***/ (function(module) { - -module.exports = require("tls"); +/***/ 13: +/***/ (function(module, __unusedexports, __webpack_require__) { -/***/ }), +const assert = __webpack_require__(357) +const path = __webpack_require__(622) +const fs = __webpack_require__(747) +let glob = undefined +try { + glob = __webpack_require__(402) +} catch (_err) { + // treat glob as optional. +} -/***/ 49: -/***/ (function(module, __unusedexports, __webpack_require__) { +const defaultGlobOpts = { + nosort: true, + silent: true +} -var wrappy = __webpack_require__(11) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +// for EMFILE handling +let timeout = 0 -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) +const isWindows = (process.platform === "win32") - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true +const defaults = options => { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] }) -}) -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true } - f.called = false - return f + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) +const rimraf = (p, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') -/***/ }), + defaults(options) -/***/ 82: -/***/ (function(__unusedmodule, exports) { + let busyTries = 0 + let errState = null + let n = 0 -"use strict"; + const next = (er) => { + errState = errState || er + if (--n === 0) + cb(errState) + } -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", { value: true }); -/** - * Sanitizes an input into a string so it 
can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -//# sourceMappingURL=utils.js.map + const afterGlob = (er, results) => { + if (er) + return cb(er) -/***/ }), + n = results.length + if (n === 0) + return cb() -/***/ 87: -/***/ (function(module) { + results.forEach(p => { + const CB = (er) => { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) + } -module.exports = require("os"); + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(() => rimraf_(p, options, CB), timeout ++) + } -/***/ }), + // already gone + if (er.code === "ENOENT") er = null + } -/***/ 93: -/***/ (function(module, __unusedexports, __webpack_require__) { + timeout = 0 + next(er) + } + rimraf_(p, options, CB) + }) + } -module.exports = minimatch -minimatch.Minimatch = Minimatch + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) -var path = { sep: '/' } -try { - path = __webpack_require__(622) -} catch (er) {} + options.lstat(p, (er, stat) => { + if (!er) + return afterGlob(null, [p]) -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __webpack_require__(306) + glob(p, options.glob, afterGlob) + }) -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } } -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' - -// * => any number of characters -var star = qmark + '*?' +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +const rimraf_ = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === "ENOENT") + return cb(null) -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) -// characters that need to be escaped in RegExp. 
-var reSpecials = charSet('().*{}+?[]^$\\!') + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) + options.unlink(p, er => { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) } -// normalizes slashes. -var slashSplit = /\/+/ - -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) - } -} +const fixWinEPERM = (p, options, er, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') -function ext (a, b) { - a = a || {} - b = b || {} - var t = {} - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - Object.keys(a).forEach(function (k) { - t[k] = a[k] + options.chmod(p, 0o666, er2 => { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, (er3, stats) => { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) }) - return t } -minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return minimatch - - var orig = minimatch +const fixWinEPERMSync = (p, options, er) => { + assert(p) + assert(options) - var m = function minimatch (p, pattern, options) { - return orig.minimatch(p, pattern, ext(def, options)) + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er } - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) - } + let stats + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } - return m + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) } -Minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return Minimatch - return minimatch.defaults(def).Minimatch +const rmdir = (p, options, originalEr, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, er => { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) } -function minimatch (p, pattern, options) { - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } +const rmkids = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') - if (!options) options = {} + options.readdir(p, (er, files) => { + if (er) + return cb(er) + let n = files.length + if (n === 0) + return options.rmdir(p, cb) + let errState + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} - // shortcut: comments match nothing. 
- if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +const rimrafSync = (p, options) => { + options = options || {} + defaults(options) - // "" only matches "" - if (pattern.trim() === '') return p === '' + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') - return new Minimatch(pattern, options).match(p) -} + let results -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } } - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } + if (!results.length) + return - if (!options) options = {} - pattern = pattern.trim() + for (let i = 0; i < results.length; i++) { + const p = results[i] - // windows support: need to use /, not \ - if (path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } + let st + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } - // make the set of regexps etc. - this.make() + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } } -Minimatch.prototype.debug = function () {} +const rmdirSync = (p, options, originalEr) => { + assert(p) + assert(options) -Minimatch.prototype.make = make -function make () { - // don't do it more than once. - if (this._made) return + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} - var pattern = this.pattern - var options = this.options +const rmkidsSync = (p, options) => { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + const retries = isWindows ? 
100 : 1 + let i = 0 + do { + let threw = true + try { + const ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} - // step 1: figure out negation, etc. - this.parseNegate() +module.exports = rimraf +rimraf.sync = rimrafSync - // step 2: expand braces - var set = this.globSet = this.braceExpand() - if (options.debug) this.debug = console.error +/***/ }), - this.debug(this.pattern, set) +/***/ 16: +/***/ (function(module) { - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) +module.exports = require("tls"); - this.debug(this.pattern, set) +/***/ }), - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) +/***/ 49: +/***/ (function(module, __unusedexports, __webpack_require__) { - this.debug(this.pattern, set) +var wrappy = __webpack_require__(11) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) - // filter out everything that didn't compile properly. - set = set.filter(function (s) { - return s.indexOf(false) === -1 +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true }) - this.debug(this.pattern, set) + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) - this.set = set +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f } -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} - if (options.nonegate) return - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' 
- ; i++) { - negate = !negate - negateOffset++ - } +/***/ }), + +/***/ 82: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +//# sourceMappingURL=utils.js.map + +/***/ }), + +/***/ 87: +/***/ (function(module) { + +module.exports = require("os"); + +/***/ }), + +/***/ 93: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = __webpack_require__(622) +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __webpack_require__(306) + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. 
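  // An illustrative sketch of this behaviour (hedged; it assumes the exported
  // `minimatch(path, pattern, options)` signature defined in this module):
  //   minimatch('anything', '# just a note')                          // => false, comment patterns never match
  //   minimatch('# just a note', '# just a note', { nocomment: true }) // => true, '#' is treated literally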
+ if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } if (negateOffset) this.pattern = pattern.substr(negateOffset) this.negate = negate @@ -1738,10 +2105,8 @@ const fs = __webpack_require__(747); const os = __webpack_require__(87); const path = __webpack_require__(622); const crypto = __webpack_require__(417); -const _c = fs.constants && os.constants ? - { fs: fs.constants, os: os.constants } : - process.binding('constants'); -const rimraf = __webpack_require__(569); +const _c = { fs: fs.constants, os: os.constants }; +const rimraf = __webpack_require__(13); /* * The working inner variables. 
@@ -1756,149 +2121,45 @@ const CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), + // constants are off on the windows platform and will not match the actual errno codes + IS_WIN32 = os.platform() === 'win32', EBADF = _c.EBADF || _c.os.errno.EBADF, ENOENT = _c.ENOENT || _c.os.errno.ENOENT, - DIR_MODE = 448 /* 0o700 */, - FILE_MODE = 384 /* 0o600 */, + DIR_MODE = 0o700 /* 448 */, + FILE_MODE = 0o600 /* 384 */, EXIT = 'exit', - SIGINT = 'SIGINT', - // this will hold the objects need to be removed on exit - _removeObjects = []; + _removeObjects = [], + + // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback + FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), + FN_RIMRAF_SYNC = rimraf.sync; -var +let _gracefulCleanup = false; /** - * Random name generator based on crypto. - * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript - * - * @param {number} howMany - * @returns {string} the generated random name - * @private - */ -function _randomChars(howMany) { - var - value = [], - rnd = null; - - // make sure that we do not fail because we ran out of entropy - try { - rnd = crypto.randomBytes(howMany); - } catch (e) { - rnd = crypto.pseudoRandomBytes(howMany); - } - - for (var i = 0; i < howMany; i++) { - value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); - } - - return value.join(''); -} - -/** - * Checks whether the `obj` parameter is defined or not. - * - * @param {Object} obj - * @returns {boolean} true if the object is undefined - * @private - */ -function _isUndefined(obj) { - return typeof obj === 'undefined'; -} - -/** - * Parses the function arguments. - * - * This function helps to have optional arguments. - * - * @param {(Options|Function)} options - * @param {Function} callback - * @returns {Array} parsed arguments - * @private - */ -function _parseArguments(options, callback) { - /* istanbul ignore else */ - if (typeof options === 'function') { - return [{}, options]; - } - - /* istanbul ignore else */ - if (_isUndefined(options)) { - return [{}, callback]; - } - - return [options, callback]; -} - -/** - * Generates a new temporary name. - * - * @param {Object} opts - * @returns {string} the new random name according to opts - * @private - */ -function _generateTmpName(opts) { - - const tmpDir = _getTmpDir(); - - // fail early on missing tmp dir - if (isBlank(opts.dir) && isBlank(tmpDir)) { - throw new Error('No tmp dir specified'); - } - - /* istanbul ignore else */ - if (!isBlank(opts.name)) { - return path.join(opts.dir || tmpDir, opts.name); - } - - // mkstemps like template - // opts.template has already been guarded in tmpName() below - /* istanbul ignore else */ - if (opts.template) { - var template = opts.template; - // make sure that we prepend the tmp path if none was given - /* istanbul ignore else */ - if (path.basename(template) === template) - template = path.join(opts.dir || tmpDir, template); - return template.replace(TEMPLATE_PATTERN, _randomChars(6)); - } - - // prefix and postfix - const name = [ - (isBlank(opts.prefix) ? 'tmp-' : opts.prefix), - process.pid, - _randomChars(12), - (opts.postfix ? opts.postfix : '') - ].join(''); - - return path.join(opts.dir || tmpDir, name); -} - -/** - * Gets a temporary file name. + * Gets a temporary file name. 
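 *
 * A minimal usage sketch (hedged; it assumes the `tmp` facade exported at the
 * bottom of this module, and the hypothetical prefix `myapp-`):
 *
 *   const tmp = require('tmp');
 *   tmp.tmpName({ prefix: 'myapp-' }, (err, name) => {
 *     if (err) throw err;
 *     console.log('free temporary name:', name); // nothing has been created on disk yet
 *   });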
* * @param {(Options|tmpNameCallback)} options options or callback * @param {?tmpNameCallback} callback the callback function */ function tmpName(options, callback) { - var + const args = _parseArguments(options, callback), opts = args[0], - cb = args[1], - tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES; - - /* istanbul ignore else */ - if (isNaN(tries) || tries < 0) - return cb(new Error('Invalid tries')); + cb = args[1]; - /* istanbul ignore else */ - if (opts.template && !opts.template.match(TEMPLATE_PATTERN)) - return cb(new Error('Invalid template provided')); + try { + _assertAndSanitizeOptions(opts); + } catch (err) { + return cb(err); + } + let tries = opts.tries; (function _getUniqueName() { try { const name = _generateTmpName(opts); @@ -1929,19 +2190,13 @@ function tmpName(options, callback) { * @throws {Error} if the options are invalid or could not generate a filename */ function tmpNameSync(options) { - var + const args = _parseArguments(options), - opts = args[0], - tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES; - - /* istanbul ignore else */ - if (isNaN(tries) || tries < 0) - throw new Error('Invalid tries'); + opts = args[0]; - /* istanbul ignore else */ - if (opts.template && !opts.template.match(TEMPLATE_PATTERN)) - throw new Error('Invalid template provided'); + _assertAndSanitizeOptions(opts); + let tries = opts.tries; do { const name = _generateTmpName(opts); try { @@ -1957,11 +2212,11 @@ function tmpNameSync(options) { /** * Creates and opens a temporary file. * - * @param {(Options|fileCallback)} options the config options or the callback function + * @param {(Options|null|undefined|fileCallback)} options the config options or the callback function or null or undefined * @param {?fileCallback} callback */ function file(options, callback) { - var + const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; @@ -1973,34 +2228,20 @@ function file(options, callback) { // create and open the file fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { - /* istanbul ignore else */ + /* istanbu ignore else */ if (err) return cb(err); if (opts.discardDescriptor) { - return fs.close(fd, function _discardCallback(err) { - /* istanbul ignore else */ - if (err) { - // Low probability, and the file exists, so this could be - // ignored. If it isn't we certainly need to unlink the - // file, and if that fails too its error is more - // important. - try { - fs.unlinkSync(name); - } catch (e) { - if (!isENOENT(e)) { - err = e; - } - } - return cb(err); - } - cb(null, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts)); + return fs.close(fd, function _discardCallback(possibleErr) { + // the chance of getting an error on close here is rather low and might occur in the most edgiest cases only + return cb(possibleErr, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts, false)); }); + } else { + // detachDescriptor passes the descriptor whereas discardDescriptor closes it, either way, we no longer care + // about the descriptor + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + cb(null, name, fd, _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? 
-1 : fd, opts, false)); } - /* istanbul ignore else */ - if (opts.detachDescriptor) { - return cb(null, name, fd, _prepareTmpFileRemoveCallback(name, -1, opts)); - } - cb(null, name, fd, _prepareTmpFileRemoveCallback(name, fd, opts)); }); }); } @@ -2013,7 +2254,7 @@ function file(options, callback) { * @throws {Error} if cannot create a file */ function fileSync(options) { - var + const args = _parseArguments(options), opts = args[0]; @@ -2029,7 +2270,7 @@ function fileSync(options) { return { name: name, fd: fd, - removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts) + removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts, true) }; } @@ -2040,7 +2281,7 @@ function fileSync(options) { * @param {?dirCallback} callback */ function dir(options, callback) { - var + const args = _parseArguments(options, callback), opts = args[0], cb = args[1]; @@ -2055,7 +2296,7 @@ function dir(options, callback) { /* istanbul ignore else */ if (err) return cb(err); - cb(null, name, _prepareTmpDirRemoveCallback(name, opts)); + cb(null, name, _prepareTmpDirRemoveCallback(name, opts, false)); }); }); } @@ -2068,7 +2309,7 @@ function dir(options, callback) { * @throws {Error} if it cannot create a directory */ function dirSync(options) { - var + const args = _parseArguments(options), opts = args[0]; @@ -2077,7 +2318,7 @@ function dirSync(options) { return { name: name, - removeCallback: _prepareTmpDirRemoveCallback(name, opts) + removeCallback: _prepareTmpDirRemoveCallback(name, opts, true) }; } @@ -2090,15 +2331,15 @@ function dirSync(options) { */ function _removeFileAsync(fdPath, next) { const _handler = function (err) { - if (err && !isENOENT(err)) { + if (err && !_isENOENT(err)) { // reraise any unanticipated error return next(err); } next(); - } + }; if (0 <= fdPath[0]) - fs.close(fdPath[0], function (err) { + fs.close(fdPath[0], function () { fs.unlink(fdPath[1], _handler); }); else fs.unlink(fdPath[1], _handler); @@ -2111,117 +2352,104 @@ function _removeFileAsync(fdPath, next) { * @private */ function _removeFileSync(fdPath) { + let rethrownException = null; try { if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); } catch (e) { // reraise any unanticipated error - if (!isEBADF(e) && !isENOENT(e)) throw e; + if (!_isEBADF(e) && !_isENOENT(e)) throw e; } finally { try { fs.unlinkSync(fdPath[1]); } catch (e) { // reraise any unanticipated error - if (!isENOENT(e)) throw e; + if (!_isENOENT(e)) rethrownException = e; } } + if (rethrownException !== null) { + throw rethrownException; + } } /** * Prepares the callback for removal of the temporary file. * + * Returns either a sync callback or a async callback depending on whether + * fileSync or file was called, which is expressed by the sync parameter. 
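 *
 * Sketch of how the returned callback reaches callers (hedged; option and
 * parameter names are the ones used by `file()` above):
 *
 *   tmp.file({ discardDescriptor: true }, (err, name, fd, removeCallback) => {
 *     if (err) throw err;
 *     // fd is undefined here because the descriptor was discarded
 *     removeCallback(); // async variant: unlinks the temporary file
 *   });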
+ * * @param {string} name the path of the file * @param {number} fd file descriptor * @param {Object} opts - * @returns {fileCallback} + * @param {boolean} sync + * @returns {fileCallback | fileCallbackSync} * @private */ -function _prepareTmpFileRemoveCallback(name, fd, opts) { - const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name]); - const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], removeCallbackSync); +function _prepareTmpFileRemoveCallback(name, fd, opts, sync) { + const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name], sync); + const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], sync, removeCallbackSync); if (!opts.keep) _removeObjects.unshift(removeCallbackSync); - return removeCallback; -} - -/** - * Simple wrapper for rimraf. - * - * @param {string} dirPath - * @param {Function} next - * @private - */ -function _rimrafRemoveDirWrapper(dirPath, next) { - rimraf(dirPath, next); -} - -/** - * Simple wrapper for rimraf.sync. - * - * @param {string} dirPath - * @private - */ -function _rimrafRemoveDirSyncWrapper(dirPath, next) { - try { - return next(null, rimraf.sync(dirPath)); - } catch (err) { - return next(err); - } + return sync ? removeCallbackSync : removeCallback; } /** * Prepares the callback for removal of the temporary directory. * + * Returns either a sync callback or a async callback depending on whether + * tmpFileSync or tmpFile was called, which is expressed by the sync parameter. + * * @param {string} name * @param {Object} opts + * @param {boolean} sync * @returns {Function} the callback * @private */ -function _prepareTmpDirRemoveCallback(name, opts) { - const removeFunction = opts.unsafeCleanup ? _rimrafRemoveDirWrapper : fs.rmdir.bind(fs); - const removeFunctionSync = opts.unsafeCleanup ? _rimrafRemoveDirSyncWrapper : fs.rmdirSync.bind(fs); - const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name); - const removeCallback = _prepareRemoveCallback(removeFunction, name, removeCallbackSync); +function _prepareTmpDirRemoveCallback(name, opts, sync) { + const removeFunction = opts.unsafeCleanup ? rimraf : fs.rmdir.bind(fs); + const removeFunctionSync = opts.unsafeCleanup ? FN_RIMRAF_SYNC : FN_RMDIR_SYNC; + const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name, sync); + const removeCallback = _prepareRemoveCallback(removeFunction, name, sync, removeCallbackSync); if (!opts.keep) _removeObjects.unshift(removeCallbackSync); - return removeCallback; + return sync ? removeCallbackSync : removeCallback; } /** * Creates a guarded function wrapping the removeFunction call. * + * The cleanup callback is save to be called multiple times. + * Subsequent invocations will be ignored. 
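 *
 * Illustrative consequence of the guard below (hedged sketch):
 *
 *   tmp.file((err, name, fd, cleanup) => {
 *     if (err) throw err;
 *     cleanup(); // removes the temporary file
 *     cleanup(); // no-op: `called` is already true
 *   });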
+ * * @param {Function} removeFunction - * @param {Object} arg - * @returns {Function} + * @param {string} fileOrDirName + * @param {boolean} sync + * @param {cleanupCallbackSync?} cleanupCallbackSync + * @returns {cleanupCallback | cleanupCallbackSync} * @private */ -function _prepareRemoveCallback(removeFunction, arg, cleanupCallbackSync) { - var called = false; +function _prepareRemoveCallback(removeFunction, fileOrDirName, sync, cleanupCallbackSync) { + let called = false; + // if sync is true, the next parameter will be ignored return function _cleanupCallback(next) { - next = next || function () {}; + + /* istanbul ignore else */ if (!called) { + // remove cleanupCallback from cache const toRemove = cleanupCallbackSync || _cleanupCallback; const index = _removeObjects.indexOf(toRemove); /* istanbul ignore else */ if (index >= 0) _removeObjects.splice(index, 1); called = true; - // sync? - if (removeFunction.length === 1) { - try { - removeFunction(arg); - return next(null); - } - catch (err) { - // if no next is provided and since we are - // in silent cleanup mode on process exit, - // we will ignore the error - return next(err); - } - } else return removeFunction(arg, next); - } else return next(new Error('cleanup callback has already been called')); + if (sync || removeFunction === FN_RMDIR_SYNC || removeFunction === FN_RIMRAF_SYNC) { + return removeFunction(fileOrDirName); + } else { + return removeFunction(fileOrDirName, next || function() {}); + } + } }; } @@ -2246,179 +2474,315 @@ function _garbageCollector() { } /** - * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. + * Random name generator based on crypto. + * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript + * + * @param {number} howMany + * @returns {string} the generated random name + * @private */ -function isEBADF(error) { - return isExpectedError(error, -EBADF, 'EBADF'); +function _randomChars(howMany) { + let + value = [], + rnd = null; + + // make sure that we do not fail because we ran out of entropy + try { + rnd = crypto.randomBytes(howMany); + } catch (e) { + rnd = crypto.pseudoRandomBytes(howMany); + } + + for (var i = 0; i < howMany; i++) { + value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); + } + + return value.join(''); } /** - * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. + * Helper which determines whether a string s is blank, that is undefined, or empty or null. + * + * @private + * @param {string} s + * @returns {Boolean} true whether the string s is blank, false otherwise */ -function isENOENT(error) { - return isExpectedError(error, -ENOENT, 'ENOENT'); +function _isBlank(s) { + return s === null || _isUndefined(s) || !s.trim(); } /** - * Helper to determine whether the expected error code matches the actual code and errno, - * which will differ between the supported node versions. - * - * - Node >= 7.0: - * error.code {string} - * error.errno {string|number} any numerical value will be negated - * - * - Node >= 6.0 < 7.0: - * error.code {string} - * error.errno {number} negated - * - * - Node >= 4.0 < 6.0: introduces SystemError - * error.code {string} - * error.errno {number} negated + * Checks whether the `obj` parameter is defined or not. 
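 *
 * e.g. (illustrative): _isUndefined(undefined) === true, while _isUndefined(null) === false.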
* - * - Node >= 0.10 < 4.0: - * error.code {number} negated - * error.errno n/a + * @param {Object} obj + * @returns {boolean} true if the object is undefined + * @private */ -function isExpectedError(error, code, errno) { - return error.code === code || error.code === errno; +function _isUndefined(obj) { + return typeof obj === 'undefined'; } /** - * Helper which determines whether a string s is blank, that is undefined, or empty or null. + * Parses the function arguments. + * + * This function helps to have optional arguments. * + * @param {(Options|null|undefined|Function)} options + * @param {?Function} callback + * @returns {Array} parsed arguments * @private - * @param {string} s - * @returns {Boolean} true whether the string s is blank, false otherwise */ -function isBlank(s) { - return s === null || s === undefined || !s.trim(); +function _parseArguments(options, callback) { + /* istanbul ignore else */ + if (typeof options === 'function') { + return [{}, options]; + } + + /* istanbul ignore else */ + if (_isUndefined(options)) { + return [{}, callback]; + } + + // copy options so we do not leak the changes we make internally + const actualOptions = {}; + for (const key of Object.getOwnPropertyNames(options)) { + actualOptions[key] = options[key]; + } + + return [actualOptions, callback]; } /** - * Sets the graceful cleanup. + * Generates a new temporary name. + * + * @param {Object} opts + * @returns {string} the new random name according to opts + * @private */ -function setGracefulCleanup() { - _gracefulCleanup = true; +function _generateTmpName(opts) { + + const tmpDir = opts.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(opts.name)) + return path.join(tmpDir, opts.dir, opts.name); + + /* istanbul ignore else */ + if (!_isUndefined(opts.template)) + return path.join(tmpDir, opts.dir, opts.template).replace(TEMPLATE_PATTERN, _randomChars(6)); + + // prefix and postfix + const name = [ + opts.prefix ? opts.prefix : 'tmp', + '-', + process.pid, + '-', + _randomChars(12), + opts.postfix ? '-' + opts.postfix : '' + ].join(''); + + return path.join(tmpDir, opts.dir, name); } /** - * Returns the currently configured tmp dir from os.tmpdir(). + * Asserts whether the specified options are valid, also sanitizes options and provides sane defaults for missing + * options. * + * @param {Options} options * @private - * @returns {string} the currently configured tmp dir */ -function _getTmpDir() { - return os.tmpdir(); +function _assertAndSanitizeOptions(options) { + + options.tmpdir = _getTmpDir(options); + + const tmpDir = options.tmpdir; + + /* istanbul ignore else */ + if (!_isUndefined(options.name)) + _assertIsRelative(options.name, 'name', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.dir)) + _assertIsRelative(options.dir, 'dir', tmpDir); + /* istanbul ignore else */ + if (!_isUndefined(options.template)) { + _assertIsRelative(options.template, 'template', tmpDir); + if (!options.template.match(TEMPLATE_PATTERN)) + throw new Error(`Invalid template, found "${options.template}".`); + } + /* istanbul ignore else */ + if (!_isUndefined(options.tries) && isNaN(options.tries) || options.tries < 0) + throw new Error(`Invalid tries, found "${options.tries}".`); + + // if a name was specified we will try once + options.tries = _isUndefined(options.name) ? 
options.tries || DEFAULT_TRIES : 1; + options.keep = !!options.keep; + options.detachDescriptor = !!options.detachDescriptor; + options.discardDescriptor = !!options.discardDescriptor; + options.unsafeCleanup = !!options.unsafeCleanup; + + // sanitize dir, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.dir = _isUndefined(options.dir) ? '' : path.relative(tmpDir, _resolvePath(options.dir, tmpDir)); + options.template = _isUndefined(options.template) ? undefined : path.relative(tmpDir, _resolvePath(options.template, tmpDir)); + // sanitize further if template is relative to options.dir + options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); + + // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to + options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); + options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; + options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; } /** - * If there are multiple different versions of tmp in place, make sure that - * we recognize the old listeners. + * Resolve the specified path name in respect to tmpDir. + * + * The specified name might include relative path components, e.g. ../ + * so we need to resolve in order to be sure that is is located inside tmpDir * - * @param {Function} listener + * @param name + * @param tmpDir + * @returns {string} * @private - * @returns {Boolean} true whether listener is a legacy listener */ -function _is_legacy_listener(listener) { - return (listener.name === '_exit' || listener.name === '_uncaughtExceptionThrown') - && listener.toString().indexOf('_garbageCollector();') > -1; +function _resolvePath(name, tmpDir) { + const sanitizedName = _sanitizeName(name); + if (sanitizedName.startsWith(tmpDir)) { + return path.resolve(sanitizedName); + } else { + return path.resolve(path.join(tmpDir, sanitizedName)); + } } /** - * Safely install SIGINT listener. - * - * NOTE: this will only work on OSX and Linux. + * Sanitize the specified path name by removing all quote characters. * + * @param name + * @returns {string} * @private */ -function _safely_install_sigint_listener() { - - const listeners = process.listeners(SIGINT); - const existingListeners = []; - for (let i = 0, length = listeners.length; i < length; i++) { - const lstnr = listeners[i]; - /* istanbul ignore else */ - if (lstnr.name === '_tmp$sigint_listener') { - existingListeners.push(lstnr); - process.removeListener(SIGINT, lstnr); - } +function _sanitizeName(name) { + if (_isBlank(name)) { + return name; } - process.on(SIGINT, function _tmp$sigint_listener(doExit) { - for (let i = 0, length = existingListeners.length; i < length; i++) { - // let the existing listener do the garbage collection (e.g. jest sandbox) - try { - existingListeners[i](false); - } catch (err) { - // ignore - } - } - try { - // force the garbage collector even it is called again in the exit listener - _garbageCollector(); - } finally { - if (!!doExit) { - process.exit(0); - } - } - }); + return name.replace(/["']/g, ''); } /** - * Safely install process exit listener. + * Asserts whether specified name is relative to the specified tmpDir. 
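 *
 * Illustrative calls (hedged; the paths are hypothetical, the checks are the
 * ones implemented below):
 *
 *   _assertIsRelative('build/cache', 'dir', '/tmp')    // passes: resolves inside /tmp
 *   _assertIsRelative('../../etc', 'dir', '/tmp')      // throws: escapes /tmp
 *   _assertIsRelative('sub/file.txt', 'name', '/tmp')  // throws: a name must not contain a path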
* + * @param {string} name + * @param {string} option + * @param {string} tmpDir + * @throws {Error} * @private */ -function _safely_install_exit_listener() { - const listeners = process.listeners(EXIT); - - // collect any existing listeners - const existingListeners = []; - for (let i = 0, length = listeners.length; i < length; i++) { - const lstnr = listeners[i]; - /* istanbul ignore else */ - // TODO: remove support for legacy listeners once release 1.0.0 is out - if (lstnr.name === '_tmp$safe_listener' || _is_legacy_listener(lstnr)) { - // we must forget about the uncaughtException listener, hopefully it is ours - if (lstnr.name !== '_uncaughtExceptionThrown') { - existingListeners.push(lstnr); - } - process.removeListener(EXIT, lstnr); - } +function _assertIsRelative(name, option, tmpDir) { + if (option === 'name') { + // assert that name is not absolute and does not contain a path + if (path.isAbsolute(name)) + throw new Error(`${option} option must not contain an absolute path, found "${name}".`); + // must not fail on valid . or .. or similar such constructs + let basename = path.basename(name); + if (basename === '..' || basename === '.' || basename !== name) + throw new Error(`${option} option must not contain a path, found "${name}".`); } - // TODO: what was the data parameter good for? - process.addListener(EXIT, function _tmp$safe_listener(data) { - for (let i = 0, length = existingListeners.length; i < length; i++) { - // let the existing listener do the garbage collection (e.g. jest sandbox) - try { - existingListeners[i](data); - } catch (err) { - // ignore - } + else { // if (option === 'dir' || option === 'template') { + // assert that dir or template are relative to tmpDir + if (path.isAbsolute(name) && !name.startsWith(tmpDir)) { + throw new Error(`${option} option must be relative to "${tmpDir}", found "${name}".`); } - _garbageCollector(); - }); + let resolvedPath = _resolvePath(name, tmpDir); + if (!resolvedPath.startsWith(tmpDir)) + throw new Error(`${option} option must be relative to "${tmpDir}", found "${resolvedPath}".`); + } } -_safely_install_exit_listener(); -_safely_install_sigint_listener(); - +/** + * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isEBADF(error) { + return _isExpectedError(error, -EBADF, 'EBADF'); +} + +/** + * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. + * + * @private + */ +function _isENOENT(error) { + return _isExpectedError(error, -ENOENT, 'ENOENT'); +} + +/** + * Helper to determine whether the expected error code matches the actual code and errno, + * which will differ between the supported node versions. + * + * - Node >= 7.0: + * error.code {string} + * error.errno {number} any numerical value will be negated + * + * CAVEAT + * + * On windows, the errno for EBADF is -4083 but os.constants.errno.EBADF is different and we must assume that ENOENT + * is no different here. + * + * @param {SystemError} error + * @param {number} errno + * @param {string} code + * @private + */ +function _isExpectedError(error, errno, code) { + return IS_WIN32 ? error.code === code : error.code === code && error.errno === errno; +} + +/** + * Sets the graceful cleanup. + * + * If graceful cleanup is set, tmp will remove all controlled temporary objects on process exit, otherwise the + * temporary objects will remain in place, waiting to be cleaned up on system restart or otherwise scheduled temporary + * object removals. 
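 *
 * A minimal sketch (hedged; it uses the `tmp` facade exported by this module):
 *
 *   const tmp = require('tmp');
 *   tmp.setGracefulCleanup();
 *   const scratch = tmp.dirSync({ unsafeCleanup: true });
 *   // scratch.name and everything inside it is removed when the process exits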
+ */ +function setGracefulCleanup() { + _gracefulCleanup = true; +} + +/** + * Returns the currently configured tmp dir from os.tmpdir(). + * + * @private + * @param {?Options} options + * @returns {string} the currently configured tmp dir + */ +function _getTmpDir(options) { + return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); +} + +// Install process exit listener +process.addListener(EXIT, _garbageCollector); + /** * Configuration options. * * @typedef {Object} Options + * @property {?boolean} keep the temporary object (file or dir) will not be garbage collected * @property {?number} tries the number of tries before give up the name generation + * @property (?int) mode the access mode, defaults are 0o700 for directories and 0o600 for files * @property {?string} template the "mkstemp" like filename template - * @property {?string} name fix name - * @property {?string} dir the tmp directory to use + * @property {?string} name fixed name relative to tmpdir or the specified dir option + * @property {?string} dir tmp directory relative to the root tmp directory in use * @property {?string} prefix prefix for the generated name * @property {?string} postfix postfix for the generated name + * @property {?string} tmpdir the root tmp directory which overrides the os tmpdir * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty + * @property {?boolean} detachDescriptor detaches the file descriptor, caller is responsible for closing the file, tmp will no longer try closing the file during garbage collection + * @property {?boolean} discardDescriptor discards the file descriptor (closes file, fd is -1), tmp will no longer try closing the file during garbage collection */ /** * @typedef {Object} FileSyncObject * @property {string} name the name of the file - * @property {string} fd the file descriptor + * @property {string} fd the file descriptor or -1 if the fd has been discarded * @property {fileCallback} removeCallback the callback function to remove the file */ @@ -2438,10 +2802,18 @@ _safely_install_sigint_listener(); * @callback fileCallback * @param {?Error} err the error object if anything goes wrong * @param {string} name the temporary file name - * @param {number} fd the file descriptor + * @param {number} fd the file descriptor or -1 if the fd had been discarded * @param {cleanupCallback} fn the cleanup callback function */ +/** + * @callback fileCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor or -1 if the fd had been discarded + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + /** * @callback dirCallback * @param {?Error} err the error object if anything goes wrong @@ -2449,11 +2821,24 @@ _safely_install_sigint_listener(); * @param {cleanupCallback} fn the cleanup callback function */ +/** + * @callback dirCallbackSync + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallbackSync} fn the cleanup callback function + */ + /** * Removes the temporary created file or directory. * * @callback cleanupCallback - * @param {simpleCallback} [next] function to call after entry was removed + * @param {simpleCallback} [next] function to call whenever the tmp object needs to be removed + */ + +/** + * Removes the temporary created file or directory. 
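 *
 * Illustrative sketch (hedged; `fileSync` is the synchronous API defined above):
 *
 *   const { name, fd, removeCallback } = tmp.fileSync();
 *   // ... write to fd or name ...
 *   removeCallback(); // synchronous variant: takes no `next` argument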
+ * + * @callback cleanupCallbackSync */ /** @@ -2465,7 +2850,7 @@ _safely_install_sigint_listener(); // exporting all the needed methods -// evaluate os.tmpdir() lazily, mainly for simplifying testing but it also will +// evaluate _getTmpDir() lazily, mainly for simplifying testing but it also will // allow users to reconfigure the temporary directory Object.defineProperty(module.exports, 'tmpdir', { enumerable: true, @@ -2509,11 +2894,11 @@ class StatusReporter { this.processedCount = 0; this.largeFiles = new Map(); this.totalFileStatus = undefined; - this.largeFileStatus = undefined; this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; } setTotalNumberOfFilesToProcess(fileTotal) { this.totalNumberOfFilesToProcess = fileTotal; + this.processedCount = 0; } start() { // displays information about the total upload/download status @@ -2522,30 +2907,17 @@ class StatusReporter { const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); }, this.displayFrequencyInMilliseconds); - // displays extra information about any large files that take a significant amount of time to upload or download every 1 second - this.largeFileStatus = setInterval(() => { - for (const value of Array.from(this.largeFiles.values())) { - core_1.info(value); - } - // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added - this.largeFiles.clear(); - }, 1000); } // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload - updateLargeFileStatus(fileName, numerator, denominator) { + updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) { // display 1 decimal place without any rounding - const percentage = this.formatPercentage(numerator, denominator); - const displayInformation = `Uploading ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`; - // any previously added display information should be overwritten for the specific large file because a map is being used - this.largeFiles.set(fileName, displayInformation); + const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize); + core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); } stop() { if (this.totalFileStatus) { clearInterval(this.totalFileStatus); } - if (this.largeFileStatus) { - clearInterval(this.largeFileStatus); - } } incrementProcessedCount() { this.processedCount++; @@ -3770,6 +4142,7 @@ const core = __importStar(__webpack_require__(470)); const upload_specification_1 = __webpack_require__(590); const upload_http_client_1 = __webpack_require__(608); const utils_1 = __webpack_require__(870); +const path_and_artifact_name_validation_1 = __webpack_require__(553); const download_http_client_1 = __webpack_require__(855); const download_specification_1 = __webpack_require__(532); const config_variables_1 = __webpack_require__(401); @@ -3786,7 +4159,9 @@ class DefaultArtifactClient { */ uploadArtifact(name, files, rootDirectory, options) { return __awaiter(this, void 0, void 0, function* () { - utils_1.checkArtifactName(name); + core.info(`Starting artifact upload +For more detailed logs during the artifact upload process, enable step-debugging: 
https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); + path_and_artifact_name_validation_1.checkArtifactName(name); // Get specification for the files being uploaded const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files); const uploadResponse = { @@ -3807,12 +4182,24 @@ class DefaultArtifactClient { throw new Error('No URL provided by the Artifact Service to upload an artifact to'); } core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`); // Upload each of the files that were found concurrently const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); // Update the size of the artifact to indicate we are done uploading // The uncompressed size is used for display when downloading a zip of the artifact from the UI + core.info(`File upload process has finished. Finalizing the artifact upload`); yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); - core.info(`Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`); + if (uploadResult.failedItems.length > 0) { + core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`); + } + else { + core.info(`Artifact has been finalized. All files have been successfully uploaded!`); + } + core.info(` +The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes +The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage + +Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`); uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath); uploadResponse.size = uploadResult.uploadSize; uploadResponse.failedItems = uploadResult.failedItems; @@ -3875,6 +4262,7 @@ class DefaultArtifactClient { while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; downloadedArtifacts += 1; + core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); // Get container entries for the specific artifact const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true); @@ -4955,10 +5343,9 @@ Glob.prototype._stat2 = function (f, abs, er, stat, cb) { /***/ }), /***/ 413: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = __webpack_require__(141); +/***/ (function(module) { +module.exports = require("stream"); /***/ }), @@ -5915,7 +6302,7 @@ class HttpClient { if (useProxy) { // If using proxy, need tunnel if (!tunnel) { - tunnel = __webpack_require__(413); + tunnel = __webpack_require__(988); } const agentOptions = { maxSockets: maxSockets, @@ -5949,458 +6336,160 @@ class HttpClient { if (!agent) { agent = usingSsl ? 
https.globalAgent : http.globalAgent; } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. - if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - }); - } -} -exports.HttpClient = HttpClient; - - -/***/ }), - -/***/ 569: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = rimraf -rimraf.sync = rimrafSync - -var assert = __webpack_require__(357) -var path = __webpack_require__(622) -var fs = __webpack_require__(747) -var glob = __webpack_require__(402) -var _0666 = parseInt('666', 8) - -var defaultGlobOpts = { - nosort: true, - silent: true -} - -// for EMFILE handling -var timeout = 0 - -var isWindows = (process.platform === "win32") - -function defaults (options) { - var methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(function(m) { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 - options.emfileWait = options.emfileWait || 1000 - if (options.glob === false) { - options.disableGlob = true - } - options.disableGlob = options.disableGlob || false - options.glob = options.glob || defaultGlobOpts -} - -function rimraf (p, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert.equal(typeof cb, 'function', 'rimraf: callback function required') - 
assert(options, 'rimraf: invalid options argument provided') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - var busyTries = 0 - var errState = null - var n = 0 - - if (options.disableGlob || !glob.hasMagic(p)) - return afterGlob(null, [p]) - - options.lstat(p, function (er, stat) { - if (!er) - return afterGlob(null, [p]) - - glob(p, options.glob, afterGlob) - }) - - function next (er) { - errState = errState || er - if (--n === 0) - cb(errState) - } - - function afterGlob (er, results) { - if (er) - return cb(er) - - n = results.length - if (n === 0) - return cb() - - results.forEach(function (p) { - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && - busyTries < options.maxBusyTries) { - busyTries ++ - var time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(function () { - rimraf_(p, options, CB) - }, time) - } - - // this one won't happen if graceful-fs is used. - if (er.code === "EMFILE" && timeout < options.emfileWait) { - return setTimeout(function () { - rimraf_(p, options, CB) - }, timeout ++) - } - - // already gone - if (er.code === "ENOENT") er = null - } - - timeout = 0 - next(er) - }) - }) - } -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, function (er, st) { - if (er && er.code === "ENOENT") - return cb(null) - - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === "EPERM" && isWindows) - fixWinEPERM(p, options, er, cb) - - if (st && st.isDirectory()) - return rmdir(p, options, er, cb) - - options.unlink(p, function (er) { - if (er) { - if (er.code === "ENOENT") - return cb(null) - if (er.code === "EPERM") - return (isWindows) - ? fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - if (er.code === "EISDIR") - return rmdir(p, options, er, cb) - } - return cb(er) - }) - }) -} - -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - if (er) - assert(er instanceof Error) - - options.chmod(p, _0666, function (er2) { - if (er2) - cb(er2.code === "ENOENT" ? null : er) - else - options.stat(p, function(er3, stats) { - if (er3) - cb(er3.code === "ENOENT" ? 
null : er) - else if (stats.isDirectory()) - rmdir(p, options, er, cb) - else - options.unlink(p, cb) - }) - }) -} - -function fixWinEPERMSync (p, options, er) { - assert(p) - assert(options) - if (er) - assert(er instanceof Error) - - try { - options.chmodSync(p, _0666) - } catch (er2) { - if (er2.code === "ENOENT") - return - else - throw er - } - - try { - var stats = options.statSync(p) - } catch (er3) { - if (er3.code === "ENOENT") - return - else - throw er - } - - if (stats.isDirectory()) - rmdirSync(p, options, er) - else - options.unlinkSync(p) -} - -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - if (originalEr) - assert(originalEr instanceof Error) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, function (er) { - if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) - rmkids(p, options, cb) - else if (er && er.code === "ENOTDIR") - cb(originalEr) - else - cb(er) - }) -} - -function rmkids(p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, function (er, files) { - if (er) - return cb(er) - var n = files.length - if (n === 0) - return options.rmdir(p, cb) - var errState - files.forEach(function (f) { - rimraf(path.join(p, f), options, function (er) { - if (errState) - return - if (er) - return cb(errState = er) - if (--n === 0) - options.rmdir(p, cb) - }) - }) - }) + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + }); + } } +exports.HttpClient = HttpClient; -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p, options) { - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - var results - - if (options.disableGlob || !glob.hasMagic(p)) { - results = [p] - } else { - try { - options.lstatSync(p) - results = [p] - } catch (er) { - results = glob.sync(p, options.glob) - } - } - if (!results.length) - return +/***/ }), - for (var i = 0; i < results.length; i++) { - var p = results[i] +/***/ 553: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - try { - var st = options.lstatSync(p) - } catch (er) { - if (er.code === "ENOENT") - return +"use strict"; - // Windows can EPERM on stat. Life is suffering. - if (er.code === "EPERM" && isWindows) - fixWinEPERMSync(p, options, er) +Object.defineProperty(exports, "__esModule", { value: true }); +const core_1 = __webpack_require__(470); +/** + * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected + * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain + * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an + * individual filesystem/platform will not be supported on all fileSystems/platforms + * + * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone + */ +const invalidArtifactFilePathCharacters = new Map([ + ['"', ' Double quote "'], + [':', ' Colon :'], + ['<', ' Less than <'], + ['>', ' Greater than >'], + ['|', ' Vertical bar |'], + ['*', ' Asterisk *'], + ['?', ' Question mark ?'], + ['\r', ' Carriage return \\r'], + ['\n', ' Line feed \\n'] +]); +const invalidArtifactNameCharacters = new Map([ + ...invalidArtifactFilePathCharacters, + ['\\', ' Backslash \\'], + ['/', ' Forward slash /'] +]); +/** + * Scans the name of the artifact to make sure there are no illegal characters + */ +function checkArtifactName(name) { + if (!name) { + throw new Error(`Artifact name: ${name}, is incorrectly provided`); } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) - rmdirSync(p, options, null) - else - options.unlinkSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "EPERM") - return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - if (er.code !== "EISDIR") - throw er - - rmdirSync(p, options, er) + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) { + if (name.includes(invalidCharacterKey)) { + throw new Error(`Artifact name is not valid: ${name}. 
Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()} + +These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`); + } } - } -} - -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - if (originalEr) - assert(originalEr instanceof Error) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "ENOTDIR") - throw originalEr - if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") - rmkidsSync(p, options) - } + core_1.info(`Artifact name is valid!`); } - -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(function (f) { - rimrafSync(path.join(p, f), options) - }) - - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - var retries = isWindows ? 100 : 1 - var i = 0 - do { - var threw = true - try { - var ret = options.rmdirSync(p, options) - threw = false - return ret - } finally { - if (++i < retries && threw) - continue +exports.checkArtifactName = checkArtifactName; +/** + * Scans the name of the filePath used to make sure there are no illegal characters + */ +function checkArtifactFilePath(path) { + if (!path) { + throw new Error(`Artifact path: ${path}, is incorrectly provided`); + } + for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) { + if (path.includes(invalidCharacterKey)) { + throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter} + +Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()} + +The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems. + `); + } } - } while (true) } - +exports.checkArtifactFilePath = checkArtifactFilePath; +//# sourceMappingURL=path-and-artifact-name-validation.js.map /***/ }), @@ -6616,7 +6705,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); const fs = __importStar(__webpack_require__(747)); const core_1 = __webpack_require__(470); const path_1 = __webpack_require__(622); -const utils_1 = __webpack_require__(870); +const path_and_artifact_name_validation_1 = __webpack_require__(553); /** * Creates a specification that describes how each file that is part of the artifact will be uploaded * @param artifactName the name of the artifact being uploaded. 
Used during upload to denote where the artifact is stored on the server @@ -6624,7 +6713,7 @@ const utils_1 = __webpack_require__(870); * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact */ function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { - utils_1.checkArtifactName(artifactName); + // artifact name was checked earlier on, no need to check again const specifications = []; if (!fs.existsSync(rootDirectory)) { throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); @@ -6667,7 +6756,7 @@ function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { } // Check for forbidden characters in file paths that will be rejected during upload const uploadPath = file.replace(rootDirectory, ''); - utils_1.checkArtifactFilePath(uploadPath); + path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath); /* uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts @@ -6845,7 +6934,7 @@ Object.defineProperty(exports, "__esModule", { value: true }); const fs = __importStar(__webpack_require__(747)); const core = __importStar(__webpack_require__(470)); const tmp = __importStar(__webpack_require__(875)); -const stream = __importStar(__webpack_require__(794)); +const stream = __importStar(__webpack_require__(413)); const utils_1 = __webpack_require__(870); const config_variables_1 = __webpack_require__(401); const util_1 = __webpack_require__(669); @@ -6987,27 +7076,35 @@ class UploadHttpClient { */ uploadFileAsync(httpClientIndex, parameters) { return __awaiter(this, void 0, void 0, function* () { - const totalFileSize = (yield stat(parameters.file)).size; + const fileStat = yield stat(parameters.file); + const totalFileSize = fileStat.size; + // on Windows with mkfifo from MSYS2 stats.isFIFO returns false, so we check if running on Windows node and + // if the file has size of 0 to compensate + const isFIFO = fileStat.isFIFO() || (process.platform === 'win32' && totalFileSize === 0); let offset = 0; let isUploadSuccessful = true; let failedChunkSizes = 0; let uploadFileSize = 0; let isGzip = true; - // the file that is being uploaded is less than 64k in size, to increase throughput and to minimize disk I/O + // the file that is being uploaded is less than 64k in size to increase throughput and to minimize disk I/O // for creating a new GZip file, an in-memory buffer is used for compression - if (totalFileSize < 65536) { + // with named pipes the file size is reported as zero in that case don't read the file in memory + if (!isFIFO && totalFileSize < 65536) { + core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); - //An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, + // An open stream is needed in the event of a failure and we need to retry. 
If a NodeJS.ReadableStream is directly passed in, // it will not properly get reset to the start of the stream if a chunk upload needs to be retried let openUploadStream; if (totalFileSize < buffer.byteLength) { // compression did not help with reducing the size, use a readable stream from the original file for upload + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); openUploadStream = () => fs.createReadStream(parameters.file); isGzip = false; uploadFileSize = totalFileSize; } else { // create a readable stream using a PassThrough stream that is both readable and writable + core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`); openUploadStream = () => { const passThrough = new stream.PassThrough(); passThrough.end(buffer); @@ -7032,25 +7129,27 @@ class UploadHttpClient { // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the // npm tmp-promise package and this file gets used to create a GZipped file const tempFile = yield tmp.file(); + core.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); // create a GZip file of the original file being uploaded, the original file should not be modified in any way uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; // compression did not help with size reduction, use the original file for upload and delete the temp GZip file - if (totalFileSize < uploadFileSize) { + // for named pipes totalFileSize is zero, this assumes compression did help + if (!isFIFO && totalFileSize < uploadFileSize) { + core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`); uploadFileSize = totalFileSize; uploadFilePath = parameters.file; isGzip = false; } + else { + core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`); + } let abortFileUpload = false; // upload only a single chunk at a time while (offset < uploadFileSize) { const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); - // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status - if (uploadFileSize > 104857600) { - this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize); - } - const start = offset; - const end = offset + chunkSize - 1; + const startChunkIndex = offset; + const endChunkIndex = offset + chunkSize - 1; offset += parameters.maxChunkSize; if (abortFileUpload) { // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed @@ -7058,10 +7157,10 @@ class UploadHttpClient { continue; } const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { - start, - end, + start: startChunkIndex, + end: endChunkIndex, autoClose: false - }), start, end, uploadFileSize, isGzip, totalFileSize); + }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize); if (!result) { // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. 
It is possible that part of a chunk was // successfully uploaded so the server may report a different size for what was uploaded @@ -7070,9 +7169,16 @@ class UploadHttpClient { core.warning(`Aborting upload for ${parameters.file} due to failure`); abortFileUpload = true; } + else { + // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status + if (uploadFileSize > 8388608) { + this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize); + } + } } // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about + core.debug(`deleting temporary gzip file ${tempFile.path}`); yield tempFile.cleanup(); return { isSuccess: isUploadSuccessful, @@ -7334,6 +7440,19 @@ const fs = __importStar(__webpack_require__(747)); const zlib = __importStar(__webpack_require__(761)); const util_1 = __webpack_require__(669); const stat = util_1.promisify(fs.stat); +/** + * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip + * files then will just waste a lot of time before ultimately being discarded (especially for very large files). + * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is + */ +const gzipExemptFileExtensions = [ + '.gzip', + '.zip', + '.tar.lz', + '.tar.gz', + '.tar.bz2', + '.7z' +]; /** * Creates a Gzip compressed file of an original file at the provided temporary filepath location * @param {string} originalFilePath filepath of whatever will be compressed. 
The original file will be unmodified @@ -7342,6 +7461,12 @@ const stat = util_1.promisify(fs.stat); */ function createGZipFileOnDisk(originalFilePath, tempFilePath) { return __awaiter(this, void 0, void 0, function* () { + for (const gzipExemptExtension of gzipExemptFileExtensions) { + if (originalFilePath.endsWith(gzipExemptExtension)) { + // return a really large number so that the original file gets uploaded + return Number.MAX_SAFE_INTEGER; + } + } return new Promise((resolve, reject) => { const inputStream = fs.createReadStream(originalFilePath); const gzip = zlib.createGzip(); @@ -7572,13 +7697,6 @@ module.exports = require("zlib"); /***/ }), -/***/ 794: -/***/ (function(module) { - -module.exports = require("stream"); - -/***/ }), - /***/ 835: /***/ (function(module) { @@ -7759,9 +7877,6 @@ class DownloadHttpClient { let response; try { response = yield makeDownloadRequest(); - if (core.isDebug()) { - utils_1.displayHttpDiagnostics(response); - } } catch (error) { // if an error is caught, it is usually indicative of a timeout so retry the download @@ -8149,7 +8264,7 @@ function getExponentialRetryTimeInMilliseconds(retryCount) { const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount; const maxTime = minTime * config_variables_1.getRetryMultiplier(); // returns a random number between the minTime (inclusive) and the maxTime (exclusive) - return Math.random() * (maxTime - minTime) + minTime; + return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds; /** @@ -8328,48 +8443,6 @@ Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} ###### End Diagnostic HTTP information ######`); } exports.displayHttpDiagnostics = displayHttpDiagnostics; -/** - * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected - * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain - * file systems such as NTFS. To maintain platform-agnostic behavior, all characters that are not supported by an - * individual filesystem/platform will not be supported on all fileSystems/platforms - * - * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone - */ -const invalidArtifactFilePathCharacters = ['"', ':', '<', '>', '|', '*', '?']; -const invalidArtifactNameCharacters = [ - ...invalidArtifactFilePathCharacters, - '\\', - '/' -]; -/** - * Scans the name of the artifact to make sure there are no illegal characters - */ -function checkArtifactName(name) { - if (!name) { - throw new Error(`Artifact name: ${name}, is incorrectly provided`); - } - for (const invalidChar of invalidArtifactNameCharacters) { - if (name.includes(invalidChar)) { - throw new Error(`Artifact name is not valid: ${name}. Contains character: "${invalidChar}". Invalid artifact name characters include: ${invalidArtifactNameCharacters.toString()}.`); - } - } -} -exports.checkArtifactName = checkArtifactName; -/** - * Scans the name of the filePath used to make sure there are no illegal characters - */ -function checkArtifactFilePath(path) { - if (!path) { - throw new Error(`Artifact path: ${path}, is incorrectly provided`); - } - for (const invalidChar of invalidArtifactFilePathCharacters) { - if (path.includes(invalidChar)) { - throw new Error(`Artifact path is not valid: ${path}. Contains character: "${invalidChar}". 
Invalid characters include: ${invalidArtifactFilePathCharacters.toString()}.`); - } - } -} -exports.checkArtifactFilePath = checkArtifactFilePath; function createDirectoriesForArtifact(directories) { return __awaiter(this, void 0, void 0, function* () { for (const directory of directories) { @@ -8430,7 +8503,10 @@ exports.sleep = sleep; /***/ 875: /***/ (function(module, __unusedexports, __webpack_require__) { -const {promisify} = __webpack_require__(669); +"use strict"; + + +const { promisify } = __webpack_require__(669); const tmp = __webpack_require__(150); // file @@ -8985,6 +9061,14 @@ function safeTrimTrailingSeparator(p) { exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; //# sourceMappingURL=internal-path-helper.js.map +/***/ }), + +/***/ 988: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(141); + + /***/ }) /******/ }); \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 972f56b7..1263b071 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5,15 +5,14 @@ "requires": true, "dependencies": { "@actions/artifact": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.5.2.tgz", - "integrity": "sha512-q/r8WSqyxBJ0ffLCRrtjCBTGnAYqP+ID4yG7f7YSlhrQ4thNg/d+Tq9f1YkLPKX46ZR97OWtGDY+oU/nxcqvLw==", + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-0.6.0.tgz", + "integrity": "sha512-o/rT5tJQEXn1mTJhbaNEhC7VO6gNHroDAcalxYSU4VJcrMhkMSBMc5uQ4Uvmdl+lc+Fa2EEzwzPLoYbrXKclGw==", "requires": { "@actions/core": "^1.2.6", "@actions/http-client": "^1.0.11", - "@types/tmp": "^0.1.0", - "tmp": "^0.1.0", - "tmp-promise": "^2.0.2" + "tmp": "^0.2.1", + "tmp-promise": "^3.0.2" } }, "@actions/core": { @@ -3040,11 +3039,6 @@ "integrity": "sha512-Hl219/BT5fLAaz6NDkSuhzasy49dwQS/DSdu4MdggFB8zcXv7vflBI3xp7FEmkmdDkBUI2bPUNeMttp2knYdxw==", "dev": true }, - "@types/tmp": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.1.0.tgz", - "integrity": "sha512-6IwZ9HzWbCq6XoQWhxLpDjuADodH/MKXRUIDFudvgjcVdjFknvmR+DNsoUeer4XPrEnrZs04Jj+kfV9pFsrhmA==" - }, "@types/yargs": { "version": "15.0.4", "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-15.0.4.tgz", @@ -9132,6 +9126,7 @@ "version": "2.6.3", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, "requires": { "glob": "^7.1.3" } @@ -9551,19 +9546,29 @@ "dev": true }, "tmp": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.1.0.tgz", - "integrity": "sha512-J7Z2K08jbGcdA1kkQpJSqLF6T0tdQqpR2pnSUXsIchbPdTI9v3e85cLW0d6WDhwuAleOV71j2xWs8qMPfK7nKw==", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", + "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", "requires": { - "rimraf": "^2.6.3" + "rimraf": "^3.0.0" + }, + "dependencies": { + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "requires": { + "glob": "^7.1.3" + } + } } }, "tmp-promise": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-2.1.1.tgz", - "integrity": 
"sha512-Z048AOz/w9b6lCbJUpevIJpRpUztENl8zdv1bmAKVHimfqRFl92ROkmT9rp7TVBnrEw2gtMTol/2Cp2S2kJa4Q==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", + "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", "requires": { - "tmp": "0.1.0" + "tmp": "^0.2.0" } }, "tmpl": { diff --git a/package.json b/package.json index bd05cb7d..381cebde 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,7 @@ }, "homepage": "https://github.com/actions/upload-artifact#readme", "dependencies": { - "@actions/artifact": "^0.5.2", + "@actions/artifact": "^0.6.0", "@actions/core": "^1.2.6", "@actions/glob": "^0.1.0", "@actions/io": "^1.0.2"