diff --git a/.eslintrc.yml b/.eslintrc.yml index 2a3fa281df..e185b97711 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -21,22 +21,22 @@ rules: property: 'assign' overrides: - files: - - docs/js/**/*.js + - 'docs/js/**/*.js' env: node: false - files: - - scripts/**/*.js - - package-scripts.js - - karma.conf.js - - .wallaby.js - - .eleventy.js - - bin/* - - lib/cli/**/*.js - - test/node-unit/**/*.js - - test/integration/options/watch.spec.js - - test/integration/helpers.js - - lib/growl.js - - docs/_data/**/*.js + - '.eleventy.js' + - '.wallaby.js' + - 'package-scripts.js' + - 'karma.conf.js' + - 'bin/*' + - 'docs/_data/**/*.js' + - 'lib/cli/**/*.js' + - 'lib/nodejs/**/*.js' + - 'scripts/**/*.js' + - 'test/integration/helpers.js' + - 'test/integration/options/watch.spec.js' + - 'test/node-unit/**/*.js' parserOptions: ecmaVersion: 2018 env: diff --git a/.mocharc.yml b/.mocharc.yml index dfb82e07f6..b458eb5423 100644 --- a/.mocharc.yml +++ b/.mocharc.yml @@ -5,6 +5,7 @@ global: - 'okGlobalC' - 'callback*' timeout: 1000 +parallel: true watch-ignore: - '.*' - 'docs/_dist/**' diff --git a/.travis.yml b/.travis.yml index 4a0c82771a..cbc2ce6722 100644 --- a/.travis.yml +++ b/.travis.yml @@ -39,7 +39,8 @@ jobs: - script: COVERAGE=1 npm start test.node after_success: npm start coveralls name: 'Latest Node.js (with coverage)' - + - script: MOCHA_PARALLEL=0 npm start test.node.unit + name: 'Latest Node.js (unit tests in serial mode)' - &node script: npm start test.node node_js: '13' @@ -95,6 +96,9 @@ jobs: script: true name: 'Prime cache' +env: + - 'NODE_OPTIONS="--trace-warnings"' + notifications: email: false webhooks: diff --git a/bin/mocha b/bin/mocha index bc9aa81a0f..d19d71ffcc 100755 --- a/bin/mocha +++ b/bin/mocha @@ -130,8 +130,23 @@ if (Object.keys(nodeArgs).length) { // terminate children. process.on('SIGINT', () => { - proc.kill('SIGINT'); // calls runner.abort() - proc.kill('SIGTERM'); // if that didn't work, we're probably in an infinite loop, so make it die. 
+ // XXX: a previous comment said this would abort the runner, but I can't see that it does + // anything with the default runner. + debug('main process caught SIGINT'); + proc.kill('SIGINT'); + // if running in parallel mode, we will have a proper SIGINT handler, so the below won't + // be needed. + if (!args.parallel || args.jobs < 2) { + // win32 does not support SIGTERM, so use next best thing. + if (require('os').platform() === 'win32') { + proc.kill('SIGKILL'); + } else { + // using SIGKILL won't cleanly close the output streams, which can result + // in cut-off text or a befouled terminal. + debug('sending SIGTERM to child process'); + proc.kill('SIGTERM'); + } + } }); } else { debug('running Mocha in-process'); diff --git a/karma.conf.js b/karma.conf.js index 9337e3d3b8..a414236e3d 100644 --- a/karma.conf.js +++ b/karma.conf.js @@ -30,13 +30,18 @@ module.exports = config => { browserify: { debug: true, configure: function configure(b) { - b.ignore('./lib/cli/*.js') - .ignore('chokidar') + b.ignore('chokidar') .ignore('fs') .ignore('glob') - .ignore('./lib/esm-utils.js') .ignore('path') .ignore('supports-color') + .ignore('./lib/esm-utils.js') + .ignore('./lib/cli/*.js') + .ignore('./lib/nodejs/serializer.js') + .ignore('./lib/nodejs/worker.js') + .ignore('./lib/nodejs/buffered-worker-pool.js') + .ignore('./lib/nodejs/parallel-buffered-runner.js') + .ignore('./lib/nodejs/reporters/parallel-buffered.js') .on('bundled', (err, content) => { if (err) { throw err; diff --git a/lib/browser/growl.js b/lib/browser/growl.js index 016798501a..20e32c6f33 100644 --- a/lib/browser/growl.js +++ b/lib/browser/growl.js @@ -11,6 +11,7 @@ var Date = global.Date; var setTimeout = global.setTimeout; var EVENT_RUN_END = require('../runner').constants.EVENT_RUN_END; +var isBrowser = require('../utils').isBrowser; /** * Checks if browser notification support exists. 
@@ -25,7 +26,7 @@ var EVENT_RUN_END = require('../runner').constants.EVENT_RUN_END; exports.isCapable = function() { var hasNotificationSupport = 'Notification' in window; var hasPromiseSupport = typeof Promise === 'function'; - return process.browser && hasNotificationSupport && hasPromiseSupport; + return isBrowser() && hasNotificationSupport && hasPromiseSupport; }; /** diff --git a/lib/cli/collect-files.js b/lib/cli/collect-files.js index 4145f4333c..5dc5e1fc99 100644 --- a/lib/cli/collect-files.js +++ b/lib/cli/collect-files.js @@ -17,13 +17,7 @@ const {NO_FILES_MATCH_PATTERN} = require('../errors').constants; /** * Smash together an array of test files in the correct order - * @param {Object} opts - Options - * @param {string[]} opts.extension - File extensions to use - * @param {string[]} opts.spec - Files, dirs, globs to run - * @param {string[]} opts.ignore - Files, dirs, globs to ignore - * @param {string[]} opts.file - List of additional files to include - * @param {boolean} opts.recursive - Find files recursively - * @param {boolean} opts.sort - Sort test files + * @param {FileCollectionOptions} [opts] - Options * @returns {string[]} List of files to test * @private */ @@ -84,3 +78,14 @@ module.exports = ({ignore, extension, file, recursive, sort, spec} = {}) => { return files; }; + +/** + * An object to configure how Mocha gathers test files + * @typedef {Object} FileCollectionOptions + * @property {string[]} extension - File extensions to use + * @property {string[]} spec - Files, dirs, globs to run + * @property {string[]} ignore - Files, dirs, globs to ignore + * @property {string[]} file - List of additional files to include + * @property {boolean} recursive - Find files recursively + * @property {boolean} sort - Sort test files + */ diff --git a/lib/cli/run-helpers.js b/lib/cli/run-helpers.js index 172fae654a..e09338f2c6 100644 --- a/lib/cli/run-helpers.js +++ b/lib/cli/run-helpers.js @@ -10,7 +10,7 @@ const fs = require('fs'); const path = 
require('path'); const debug = require('debug')('mocha:cli:run:helpers'); -const watchRun = require('./watch-run'); +const {watchRun, watchParallelRun} = require('./watch-run'); const collectFiles = require('./collect-files'); const {type} = require('../utils'); const {format} = require('util'); @@ -151,24 +151,52 @@ const singleRun = async (mocha, {exit}, fileCollectParams) => { }; /** - * Actually run tests + * Collect files and run tests (using `BufferedRunner`). + * + * This is `async` for consistency. + * * @param {Mocha} mocha - Mocha instance - * @param {Object} opts - Command line options + * @param {Options} options - Command line options + * @param {Object} fileCollectParams - Parameters that control test + * file collection. See `lib/cli/collect-files.js`. + * @returns {Promise} + * @ignore * @private - * @returns {Promise} + */ +const parallelRun = async (mocha, options, fileCollectParams) => { + const files = collectFiles(fileCollectParams); + debug( + 'executing %d test file(s) across %d concurrent jobs', + files.length, + options.jobs + ); + mocha.files = files; + + // note that we DO NOT load any files here; this is handled by the worker + return mocha.run(options.exit ? exitMocha : exitMochaLater); +}; + +/** + * Actually run tests. 
Delegates to one of four different functions: + * - `singleRun`: run tests in serial & exit + * - `watchRun`: run tests in serial, rerunning as files change + * - `parallelRun`: run tests in parallel & exit + * - `watchParallelRun`: run tests in parallel, rerunning as files change + * @param {Mocha} mocha - Mocha instance + * @param {Options} opts - Command line options + * @private + * @returns {Promise} */ exports.runMocha = async (mocha, options) => { const { watch = false, extension = [], - exit = false, ignore = [], file = [], + parallel = false, recursive = false, sort = false, - spec = [], - watchFiles, - watchIgnore + spec = [] } = options; const fileCollectParams = { @@ -180,11 +208,14 @@ exports.runMocha = async (mocha, options) => { spec }; + let run; if (watch) { - watchRun(mocha, {watchFiles, watchIgnore}, fileCollectParams); + run = parallel ? watchParallelRun : watchRun; } else { - await singleRun(mocha, {exit}, fileCollectParams); + run = parallel ? parallelRun : singleRun; } + + return run(mocha, options, fileCollectParams); }; /** diff --git a/lib/cli/run-option-metadata.js b/lib/cli/run-option-metadata.js index 4648d9fbfe..da3b7d995d 100644 --- a/lib/cli/run-option-metadata.js +++ b/lib/cli/run-option-metadata.js @@ -42,11 +42,12 @@ exports.types = { 'list-interfaces', 'list-reporters', 'no-colors', + 'parallel', 'recursive', 'sort', 'watch' ], - number: ['retries'], + number: ['retries', 'jobs'], string: [ 'config', 'fgrep', @@ -75,7 +76,9 @@ exports.aliases = { growl: ['G'], ignore: ['exclude'], invert: ['i'], + jobs: ['j'], 'no-colors': ['C'], + parallel: ['p'], reporter: ['R'], 'reporter-option': ['reporter-options', 'O'], require: ['r'], diff --git a/lib/cli/run.js b/lib/cli/run.js index 1bd938b77a..f2c970fb26 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -151,6 +151,13 @@ exports.builder = yargs => description: 'Inverts --grep and --fgrep matches', group: GROUPS.FILTERS }, + jobs: { + description: + 'Number of concurrent jobs for 
--parallel; use 1 to run in serial', + defaultDescription: '(number of CPU cores - 1)', + requiresArg: true, + group: GROUPS.RULES + }, 'list-interfaces': { conflicts: Array.from(ONE_AND_DONE_ARGS), description: 'List built-in user interfaces & exit' @@ -170,6 +177,10 @@ exports.builder = yargs => normalize: true, requiresArg: true }, + parallel: { + description: 'Run tests in parallel', + group: GROUPS.RULES + }, recursive: { description: 'Look for tests in subdirectories', group: GROUPS.FILES @@ -272,6 +283,40 @@ exports.builder = yargs => ); } + if (argv.parallel) { + // yargs.conflicts() can't deal with `--file foo.js --no-parallel`, either + if (argv.file) { + throw createUnsupportedError( + '--parallel runs test files in a non-deterministic order, and is mutually exclusive with --file' + ); + } + + // or this + if (argv.sort) { + throw createUnsupportedError( + '--parallel runs test files in a non-deterministic order, and is mutually exclusive with --sort' + ); + } + + if (argv.reporter === 'progress') { + throw createUnsupportedError( + '--reporter=progress is mutually exclusive with --parallel' + ); + } + + if (argv.reporter === 'markdown') { + throw createUnsupportedError( + '--reporter=markdown is mutually exclusive with --parallel' + ); + } + + if (argv.reporter === 'json-stream') { + throw createUnsupportedError( + '--reporter=json-stream is mutually exclusive with --parallel' + ); + } + } + if (argv.compilers) { throw createUnsupportedError( `--compilers is DEPRECATED and no longer supported. 
diff --git a/lib/cli/watch-run.js b/lib/cli/watch-run.js index b35a906959..2c59490a81 100644 --- a/lib/cli/watch-run.js +++ b/lib/cli/watch-run.js @@ -1,5 +1,6 @@ 'use strict'; +const debug = require('debug')('mocha:cli:watch'); const path = require('path'); const chokidar = require('chokidar'); const Context = require('../context'); @@ -12,6 +13,42 @@ const collectFiles = require('./collect-files'); * @private */ +/** + * Run Mocha in parallel "watch" mode + * @param {Mocha} mocha - Mocha instance + * @param {Object} opts - Options + * @param {string[]} [opts.watchFiles] - List of paths and patterns to + * watch. If not provided all files with an extension included in + * `fileColletionParams.extension` are watched. See first argument of + * `chokidar.watch`. + * @param {string[]} opts.watchIgnore - List of paths and patterns to + * exclude from watching. See `ignored` option of `chokidar`. + * @param {FileCollectionOptions} fileCollectParams - Parameters that control test + * @private + */ +exports.watchParallelRun = ( + mocha, + {watchFiles, watchIgnore}, + fileCollectParams +) => { + debug('creating parallel watcher'); + return createWatcher(mocha, { + watchFiles, + watchIgnore, + beforeRun({mocha}) { + mocha.files = collectFiles(fileCollectParams); + // in parallel mode, the main Mocha process doesn't actually load the + // files. this flag prevents `mocha.run()` from autoloading. + mocha.lazyLoadFiles(true); + return mocha; + }, + afterRun({watcher}) { + blastCache(watcher); + }, + fileCollectParams + }); +}; + /** * Run Mocha in "watch" mode * @param {Mocha} mocha - Mocha instance @@ -22,27 +59,88 @@ const collectFiles = require('./collect-files'); * `chokidar.watch`. * @param {string[]} opts.watchIgnore - List of paths and patterns to * exclude from watching. See `ignored` option of `chokidar`. 
- * @param {Object} fileCollectParams - Parameters that control test + * @param {FileCollectionOptions} fileCollectParams - Parameters that control test * file collection. See `lib/cli/collect-files.js`. - * @param {string[]} fileCollectParams.extension - List of extensions - * to watch if `opts.watchFiles` is not given. * @private */ -module.exports = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { +exports.watchRun = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { + debug('creating serial watcher'); + // list of all test files + + return createWatcher(mocha, { + watchFiles, + watchIgnore, + beforeRun({mocha}) { + mocha.unloadFiles(); + + // I don't know why we're cloning the root suite. + const rootSuite = mocha.suite.clone(); + + // this `require` is needed because the require cache has been cleared. the dynamic + // exports set via the below call to `mocha.ui()` won't work properly if a + // test depends on this module (see `required-tokens.spec.js`). + const Mocha = require('../mocha'); + + // ... and now that we've gotten a new module, we need to use it again due + // to `mocha.ui()` call + const newMocha = new Mocha(mocha.options); + // don't know why this is needed + newMocha.suite = rootSuite; + // nor this + newMocha.suite.ctx = new Context(); + + // reset the list of files + newMocha.files = collectFiles(fileCollectParams); + + // because we've swapped out the root suite (see the `run` inner function + // in `createRerunner`), we need to call `mocha.ui()` again to set up the context/globals. + newMocha.ui(newMocha.options.ui); + + return newMocha; + }, + afterRun({watcher}) { + blastCache(watcher); + }, + fileCollectParams + }); +}; + +/** + * Bootstraps a chokidar watcher. 
Handles keyboard input & signals + * @param {Mocha} mocha - Mocha instance + * @param {Object} opts + * @param {BeforeWatchRun} [opts.beforeRun] - Function to call before + * `mocha.run()` + * @param {AfterWatchRun} [opts.afterRun] - Function to call after `mocha.run()` + * @param {string[]} [opts.watchFiles] - List of paths and patterns to watch. If + * not provided all files with an extension included in + * `fileColletionParams.extension` are watched. See first argument of + * `chokidar.watch`. + * @param {string[]} [opts.watchIgnore] - List of paths and patterns to exclude + * from watching. See `ignored` option of `chokidar`. + * @param {FileCollectionOptions} opts.fileCollectParams - List of extensions to watch if `opts.watchFiles` is not given. + * @returns {FSWatcher} + * @ignore + * @private + */ +const createWatcher = ( + mocha, + {watchFiles, watchIgnore, beforeRun, afterRun, fileCollectParams} +) => { if (!watchFiles) { watchFiles = fileCollectParams.extension.map(ext => `**/*.${ext}`); } + debug('ignoring files matching: %s', watchIgnore); + const watcher = chokidar.watch(watchFiles, { ignored: watchIgnore, ignoreInitial: true }); - const rerunner = createRerunner(mocha, () => { - getWatchedFiles(watcher).forEach(file => { - delete require.cache[file]; - }); - mocha.files = collectFiles(fileCollectParams); + const rerunner = createRerunner(mocha, watcher, { + beforeRun, + afterRun }); watcher.on('ready', () => { @@ -53,7 +151,6 @@ module.exports = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { rerunner.scheduleRun(); }); - console.log(); hideCursor(); process.on('exit', () => { showCursor(); @@ -74,36 +171,43 @@ module.exports = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { .toLowerCase(); if (str === 'rs') rerunner.scheduleRun(); }); + + return watcher; }; /** - * Create an object that allows you to rerun tests on the mocha - * instance. `beforeRun` is called everytime before `mocha.run()` is - * called. 
+ * Create an object that allows you to rerun tests on the mocha instance. * * @param {Mocha} mocha - Mocha instance - * @param {function} beforeRun - Called just before `mocha.run()` + * @param {FSWatcher} watcher - chokidar `FSWatcher` instance + * @param {Object} [opts] - Options! + * @param {BeforeWatchRun} [opts.beforeRun] - Function to call before `mocha.run()` + * @param {AfterWatchRun} [opts.afterRun] - Function to call after `mocha.run()` + * @returns {Rerunner} + * @ignore + * @private */ -const createRerunner = (mocha, beforeRun) => { +const createRerunner = (mocha, watcher, {beforeRun, afterRun} = {}) => { // Set to a `Runner` when mocha is running. Set to `null` when mocha is not // running. let runner = null; + // true if a file has changed during a test run let rerunScheduled = false; const run = () => { - try { - beforeRun(); - resetMocha(mocha); - runner = mocha.run(() => { - runner = null; - if (rerunScheduled) { - rerun(); - } - }); - } catch (e) { - console.log(e.stack); - } + mocha = beforeRun ? beforeRun({mocha, watcher}) : mocha; + + runner = mocha.run(() => { + debug('finished watch run'); + runner = null; + afterRun && afterRun({mocha, watcher}); + if (rerunScheduled) { + rerun(); + } else { + debug('waiting for changes...'); + } + }); }; const scheduleRun = () => { @@ -136,32 +240,18 @@ const createRerunner = (mocha, beforeRun) => { * * @param watcher - Instance of a chokidar watcher * @return {string[]} - List of absolute paths + * @ignore + * @private */ const getWatchedFiles = watcher => { const watchedDirs = watcher.getWatched(); - let watchedFiles = []; - Object.keys(watchedDirs).forEach(dir => { - watchedFiles = watchedFiles.concat( - watchedDirs[dir].map(file => path.join(dir, file)) - ); - }); - return watchedFiles; -}; - -/** - * Reset the internal state of the mocha instance so that tests can be rerun. 
- * - * @param {Mocha} mocha - Mocha instance - * @private - */ -const resetMocha = mocha => { - mocha.unloadFiles(); - mocha.suite = mocha.suite.clone(); - mocha.suite.ctx = new Context(); - // Registers a callback on `mocha.suite` that wires new context to the DSL - // (e.g. `describe`) that is exposed as globals when the test files are - // reloaded. - mocha.ui(mocha.options.ui); + return Object.keys(watchedDirs).reduce( + (acc, dir) => [ + ...acc, + ...watchedDirs[dir].map(file => path.join(dir, file)) + ], + [] + ); }; /** @@ -189,3 +279,43 @@ const showCursor = () => { const eraseLine = () => { process.stdout.write('\u001b[2K'); }; + +/** + * Blast all of the watched files out of `require.cache` + * @param {FSWatcher} watcher - chokidar FSWatcher + * @ignore + * @private + */ +const blastCache = watcher => { + const files = getWatchedFiles(watcher); + files.forEach(file => { + delete require.cache[file]; + }); + debug('deleted %d file(s) from the require cache', files.length); +}; + +/** + * Callback to be run before `mocha.run()` is called. + * Optionally, it can return a new `Mocha` instance. + * @callback BeforeWatchRun + * @private + * @param {{mocha: Mocha, watcher: FSWatcher}} options + * @returns {Mocha} + */ + +/** + * Callback to be run after `mocha.run()` completes. Typically used to clear + * require cache. 
+ * @callback AfterWatchRun + * @private + * @param {{mocha: Mocha, watcher: FSWatcher}} options + * @returns {void} + */ + +/** + * Object containing run control methods + * @typedef {Object} Rerunner + * @private + * @property {Function} run - Calls `mocha.run()` + * @property {Function} scheduleRun - Schedules another call to `run` + */ diff --git a/lib/errors.js b/lib/errors.js index 1e665e5fb3..929f56399c 100644 --- a/lib/errors.js +++ b/lib/errors.js @@ -66,7 +66,12 @@ var constants = { /** * Invalid state transition occuring in `Mocha` instance */ - INSTANCE_ALREADY_DISPOSED: 'ERR_MOCHA_INSTANCE_ALREADY_DISPOSED' + INSTANCE_ALREADY_DISPOSED: 'ERR_MOCHA_INSTANCE_ALREADY_DISPOSED', + + /** + * Use of `only()` w/ `--forbid-only` results in this error. + */ + FORBIDDEN_EXCLUSIVITY: 'ERR_MOCHA_FORBIDDEN_EXCLUSIVITY' }; /** @@ -293,6 +298,23 @@ function createMultipleDoneError(runnable, originalErr) { return err; } +/** + * Creates an error object to be thrown when `.only()` is used with + * `--forbid-only`. + * @public + * @param {Mocha} mocha - Mocha instance + * @returns {Error} Error with code {@link constants.FORBIDDEN_EXCLUSIVITY} + */ +function createForbiddenExclusivityError(mocha) { + var err = new Error( + mocha.isWorker + ? 
'`.only` is not supported in parallel mode' + : '`.only` forbidden by --forbid-only' + ); + err.code = constants.FORBIDDEN_EXCLUSIVITY; + return err; +} + module.exports = { createInvalidArgumentTypeError: createInvalidArgumentTypeError, createInvalidArgumentValueError: createInvalidArgumentValueError, @@ -307,5 +329,6 @@ module.exports = { createMochaInstanceAlreadyRunningError: createMochaInstanceAlreadyRunningError, createFatalError: createFatalError, createMultipleDoneError: createMultipleDoneError, + createForbiddenExclusivityError: createForbiddenExclusivityError, constants: constants }; diff --git a/lib/hook.js b/lib/hook.js index 6560715fc5..6c12c02bb8 100644 --- a/lib/hook.js +++ b/lib/hook.js @@ -52,3 +52,27 @@ Hook.prototype.error = function(err) { this._error = err; }; + +/** + * Returns an object suitable for IPC. + * Functions are represented by keys beginning with `$$`. + * @private + * @returns {Object} + */ +Hook.prototype.serialize = function serialize() { + return { + $$isPending: this.isPending(), + $$titlePath: this.titlePath(), + ctx: { + currentTest: { + title: this.ctx && this.ctx.currentTest && this.ctx.currentTest.title + } + }, + parent: { + root: this.parent.root, + title: this.parent.title + }, + title: this.title, + type: this.type + }; +}; diff --git a/lib/interfaces/common.js b/lib/interfaces/common.js index e7e84b2511..5fa87e4537 100644 --- a/lib/interfaces/common.js +++ b/lib/interfaces/common.js @@ -4,6 +4,7 @@ var Suite = require('../suite'); var errors = require('../errors'); var createMissingArgumentError = errors.createMissingArgumentError; var createUnsupportedError = errors.createUnsupportedError; +var createForbiddenExclusivityError = errors.createForbiddenExclusivityError; /** * Functions common to more than one interface. 
@@ -127,9 +128,8 @@ module.exports = function(suites, context, mocha) { suites.unshift(suite); if (opts.isOnly) { if (mocha.options.forbidOnly && shouldBeTested(suite)) { - throw createUnsupportedError('`.only` forbidden'); + throw createForbiddenExclusivityError(mocha); } - suite.parent.appendOnlySuite(suite); } if (suite.pending) { @@ -166,8 +166,9 @@ module.exports = function(suites, context, mocha) { * @returns {*} */ only: function(mocha, test) { - if (mocha.options.forbidOnly) - throw createUnsupportedError('`.only` forbidden'); + if (mocha.options.forbidOnly) { + throw createForbiddenExclusivityError(mocha); + } test.markOnly(); return test; }, diff --git a/lib/mocha.js b/lib/mocha.js index 1ab26958c8..5c39d62cac 100644 --- a/lib/mocha.js +++ b/lib/mocha.js @@ -9,7 +9,7 @@ var escapeRe = require('escape-string-regexp'); var path = require('path'); var builtinReporters = require('./reporters'); -var growl = require('./growl'); +var growl = require('./nodejs/growl'); var utils = require('./utils'); var mocharc = require('./mocharc.json'); var errors = require('./errors'); @@ -26,6 +26,7 @@ var EVENT_FILE_PRE_REQUIRE = Suite.constants.EVENT_FILE_PRE_REQUIRE; var EVENT_FILE_POST_REQUIRE = Suite.constants.EVENT_FILE_POST_REQUIRE; var EVENT_FILE_REQUIRE = Suite.constants.EVENT_FILE_REQUIRE; var sQuote = utils.sQuote; +var debug = require('debug')('mocha:mocha'); exports = module.exports = Mocha; @@ -57,7 +58,7 @@ var mochaStates = utils.defineConstants({ * To require local UIs and reporters when running in node. */ -if (!process.browser && typeof module.paths !== 'undefined') { +if (!utils.isBrowser() && typeof module.paths !== 'undefined') { var cwd = utils.cwd(); module.paths.push(cwd, path.join(cwd, 'node_modules')); } @@ -66,11 +67,6 @@ if (!process.browser && typeof module.paths !== 'undefined') { * Expose internals. 
*/ -/** - * @public - * @class utils - * @memberof Mocha - */ exports.utils = utils; exports.interfaces = require('./interfaces'); /** @@ -118,8 +114,11 @@ exports.Test = require('./test'); * @param {number} [options.slow] - Slow threshold value. * @param {number|string} [options.timeout] - Timeout threshold value. * @param {string} [options.ui] - Interface name. + * @param {boolean} [options.parallel] - Run jobs in parallel + * @param {number} [options.jobs] - Max number of worker processes for parallel runs * @param {MochaRootHookObject} [options.rootHooks] - Hooks to bootstrap the root * suite with + * @param {boolean} [options.isWorker] - Should be `true` if `Mocha` process is running in a worker process. */ function Mocha(options) { options = utils.assign({}, mocharc, options || {}); @@ -128,6 +127,7 @@ function Mocha(options) { // root suite this.suite = new exports.Suite('', new exports.Context(), true); this._cleanReferencesAfterRun = true; + this._state = mochaStates.INIT; this.grep(options.grep) .fgrep(options.fgrep) @@ -171,6 +171,39 @@ function Mocha(options) { if (options.rootHooks) { this.rootHooks(options.rootHooks); } + + /** + * The class which we'll instantiate in {@link Mocha#run}. Defaults to + * {@link Runner} in serial mode; changes in parallel mode. + * @memberof Mocha + * @private + */ + this._runnerClass = exports.Runner; + + /** + * Whether or not to call {@link Mocha#loadFiles} implicitly when calling + * {@link Mocha#run}. If this is `true`, then it's up to the consumer to call + * {@link Mocha#loadFiles} _or_ {@link Mocha#loadFilesAsync}. + * @private + * @memberof Mocha + */ + this._lazyLoadFiles = false; + + /** + * It's useful for a Mocha instance to know if it's running in a worker process. + * We could derive this via other means, but it's helpful to have a flag to refer to. 
+ * @memberof Mocha + * @private + */ + this.isWorker = Boolean(options.isWorker); + + if ( + options.parallel && + (typeof options.jobs === 'undefined' || options.jobs > 1) + ) { + debug('attempting to enable parallel mode'); + this.parallelMode(true); + } } /** @@ -221,20 +254,20 @@ Mocha.prototype.addFile = function(file) { * // Use XUnit reporter and direct its output to file * mocha.reporter('xunit', { output: '/path/to/testspec.xunit.xml' }); */ -Mocha.prototype.reporter = function(reporter, reporterOptions) { - if (typeof reporter === 'function') { - this._reporter = reporter; +Mocha.prototype.reporter = function(reporterName, reporterOptions) { + if (typeof reporterName === 'function') { + this._reporter = reporterName; } else { - reporter = reporter || 'spec'; - var _reporter; + reporterName = reporterName || 'spec'; + var reporter; // Try to load a built-in reporter. - if (builtinReporters[reporter]) { - _reporter = builtinReporters[reporter]; + if (builtinReporters[reporterName]) { + reporter = builtinReporters[reporterName]; } // Try to load reporters from process.cwd() and node_modules - if (!_reporter) { + if (!reporter) { try { - _reporter = require(reporter); + reporter = require(reporterName); } catch (err) { if ( err.code === 'MODULE_NOT_FOUND' || @@ -242,31 +275,31 @@ Mocha.prototype.reporter = function(reporter, reporterOptions) { ) { // Try to load reporters from a path (absolute or relative) try { - _reporter = require(path.resolve(utils.cwd(), reporter)); + reporter = require(path.resolve(utils.cwd(), reporterName)); } catch (_err) { _err.code === 'MODULE_NOT_FOUND' || _err.message.indexOf('Cannot find module') >= 0 - ? utils.warn(sQuote(reporter) + ' reporter not found') + ? 
utils.warn(sQuote(reporterName) + ' reporter not found') : utils.warn( - sQuote(reporter) + + sQuote(reporterName) + ' reporter blew up with error:\n' + err.stack ); } } else { utils.warn( - sQuote(reporter) + ' reporter blew up with error:\n' + err.stack + sQuote(reporterName) + ' reporter blew up with error:\n' + err.stack ); } } } - if (!_reporter) { + if (!reporter) { throw createInvalidReporterError( - 'invalid reporter ' + sQuote(reporter), - reporter + 'invalid reporter ' + sQuote(reporterName), + reporterName ); } - this._reporter = _reporter; + this._reporter = reporter; } this.options.reporterOption = reporterOptions; // alias option name is used in public reporters xunit/tap/progress @@ -375,7 +408,7 @@ Mocha.prototype.loadFiles = function(fn) { Mocha.prototype.loadFilesAsync = function() { var self = this; var suite = this.suite; - this.loadAsync = true; + this.lazyLoadFiles(true); if (!esmUtils) { return new Promise(function(resolve) { @@ -591,7 +624,7 @@ Mocha.prototype.fullTrace = function(fullTrace) { Mocha.prototype.growl = function() { this.options.growl = this.isGrowlCapable(); if (!this.options.growl) { - var detail = process.browser + var detail = utils.isBrowser() ? 'notification support not available in this browser...' : 'notification support prerequisites not installed...'; console.error(detail + ' cannot enable!'); @@ -848,6 +881,7 @@ Mocha.prototype.forbidPending = function(forbidPending) { /** * Throws an error if mocha is in the wrong state to be able to transition to a "running" state. 
+ * @private */ Mocha.prototype._guardRunningStateTransition = function() { if (this._state === mochaStates.RUNNING) { @@ -914,14 +948,14 @@ Mocha.prototype.run = function(fn) { this._previousRunner.dispose(); this.suite.reset(); } - if (this.files.length && !this.loadAsync) { + if (this.files.length && !this._lazyLoadFiles) { this.loadFiles(); } var self = this; var suite = this.suite; var options = this.options; options.files = this.files; - var runner = new exports.Runner(suite, { + var runner = new this._runnerClass(suite, { delay: options.delay, cleanReferencesAfterRun: this._cleanReferencesAfterRun }); @@ -963,7 +997,7 @@ Mocha.prototype.run = function(fn) { } } - return runner.run(done); + return runner.run(done, {files: this.files, options: options}); }; /** @@ -994,6 +1028,65 @@ Mocha.prototype.rootHooks = function rootHooks(hooks) { return this; }; +/** + * Toggles parallel mode. + * + * Must be run before calling {@link Mocha#run}. Changes the `Runner` class to + * use; also enables lazy file loading if not already done so. + * @param {boolean} [enable] - If `true`, enable; otherwise disable. + * @throws If run in browser + * @throws If Mocha not in "INIT" state + * @returns {Mocha} + * @chainable + * @public + */ +Mocha.prototype.parallelMode = function parallelMode(enable) { + if (utils.isBrowser()) { + throw errors.createUnsupportedError( + 'parallel mode is only supported in Node.js' + ); + } + var parallel = enable === true; + if ( + parallel === this.options.parallel && + this._lazyLoadFiles && + this._runnerClass !== exports.Runner + ) { + return this; + } + if (this._state !== mochaStates.INIT) { + throw errors.createUnsupportedError( + 'cannot change parallel mode after having called run()' + ); + } + this.options.parallel = parallel; + + // swap Runner class + this._runnerClass = parallel + ? 
require('./nodejs/parallel-buffered-runner') + : exports.Runner; + + // lazyLoadFiles may have been set `true` otherwise (for ESM loading), + // so keep `true` if so. + return this.lazyLoadFiles(this._lazyLoadFiles || parallel); +}; + +/** + * Disables implicit call to {@link Mocha#loadFiles} in {@link Mocha#run}. This + * setting is used by watch mode, parallel mode, and for loading ESM files. + * @todo This should throw if we've already loaded files; such behavior + * necessitates adding a new state. + * @param {boolean} [enable] - If `true`, disable eager loading of files in + * {@link Mocha#run} + * @chainable + * @public + */ +Mocha.prototype.lazyLoadFiles = function lazyLoadFiles(enable) { + this._lazyLoadFiles = enable === true; + debug('set lazy load to %s', enable); + return this; +}; + /** * An alternative way to define root hooks that works with parallel runs. * @typedef {Object} MochaRootHookObject diff --git a/lib/nodejs/buffered-worker-pool.js b/lib/nodejs/buffered-worker-pool.js new file mode 100644 index 0000000000..144333ef1a --- /dev/null +++ b/lib/nodejs/buffered-worker-pool.js @@ -0,0 +1,174 @@ +/** + * A wrapper around a third-party child process worker pool implementation. + * Used by {@link module:buffered-runner}. + * @private + * @module buffered-worker-pool + */ + +'use strict'; + +const serializeJavascript = require('serialize-javascript'); +const workerpool = require('workerpool'); +const {deserialize} = require('./serializer'); +const debug = require('debug')('mocha:parallel:buffered-worker-pool'); +const {createInvalidArgumentTypeError} = require('../errors'); + +const WORKER_PATH = require.resolve('./worker.js'); + +/** + * A mapping of Mocha `Options` objects to serialized values. + * + * This is helpful because we tend to same the same options over and over + * over IPC. + * @type {WeakMap} + */ +let optionsCache = new WeakMap(); + +/** + * These options are passed into the [workerpool](https://npm.im/workerpool) module. 
+ * @type {Partial} + */ +const WORKER_POOL_DEFAULT_OPTS = { + // use child processes, not worker threads! + workerType: 'process', + // ensure the same flags sent to `node` for this `mocha` invocation are passed + // along to children + forkOpts: {execArgv: process.execArgv}, + maxWorkers: workerpool.cpus - 1 +}; + +/** + * A wrapper around a third-party worker pool implementation. + * @private + */ +class BufferedWorkerPool { + /** + * Creates an underlying worker pool instance; determines max worker count + * @param {Partial} [opts] - Options + */ + constructor(opts = {}) { + const maxWorkers = Math.max( + 1, + typeof opts.maxWorkers === 'undefined' + ? WORKER_POOL_DEFAULT_OPTS.maxWorkers + : opts.maxWorkers + ); + + /* istanbul ignore next */ + if (workerpool.cpus < 2) { + // TODO: decide whether we should warn + debug( + 'not enough CPU cores available to run multiple jobs; avoid --parallel on this machine' + ); + } else if (maxWorkers >= workerpool.cpus) { + // TODO: decide whether we should warn + debug( + '%d concurrent job(s) requested, but only %d core(s) available', + maxWorkers, + workerpool.cpus + ); + } + /* istanbul ignore next */ + debug( + 'run(): starting worker pool of max size %d, using node args: %s', + maxWorkers, + process.execArgv.join(' ') + ); + + this.options = Object.assign({}, WORKER_POOL_DEFAULT_OPTS, opts, { + maxWorkers + }); + this._pool = workerpool.pool(WORKER_PATH, this.options); + } + + /** + * Terminates all workers in the pool. + * @param {boolean} [force] - Whether to force-kill workers. By default, lets workers finish their current task before termination. + * @private + * @returns {Promise} + */ + async terminate(force = false) { + /* istanbul ignore next */ + debug('terminate(): terminating with force = %s', force); + return this._pool.terminate(force); + } + + /** + * Adds a test file run to the worker pool queue for execution by a worker process. + * + * Handles serialization/deserialization. 
+ * + * @param {string} filepath - Filepath of test + * @param {Options} [options] - Options for Mocha instance + * @private + * @returns {Promise} + */ + async run(filepath, options = {}) { + if (!filepath || typeof filepath !== 'string') { + throw createInvalidArgumentTypeError( + 'Expected a non-empty filepath', + 'filepath', + 'string' + ); + } + const serializedOptions = BufferedWorkerPool.serializeOptions(options); + const result = await this._pool.exec('run', [filepath, serializedOptions]); + return deserialize(result); + } + + /** + * Returns stats about the state of the worker processes in the pool. + * + * Used for debugging. + * + * @private + */ + stats() { + return this._pool.stats(); + } + + /** + * Instantiates a {@link WorkerPool}. + * @private + */ + static create(...args) { + return new BufferedWorkerPool(...args); + } + + /** + * Given Mocha options object `opts`, serialize into a format suitable for + * transmission over IPC. + * + * @param {Options} [opts] - Mocha options + * @private + * @returns {string} Serialized options + */ + static serializeOptions(opts = {}) { + if (!optionsCache.has(opts)) { + const serialized = serializeJavascript(opts, { + unsafe: true, // this means we don't care about XSS + ignoreFunction: true // do not serialize functions + }); + optionsCache.set(opts, serialized); + /* istanbul ignore next */ + debug( + 'serializeOptions(): serialized options %O to: %s', + opts, + serialized + ); + } + return optionsCache.get(opts); + } + + /** + * Resets internal cache of serialized options objects. 
+ * + * For testing/debugging + * @private + */ + static resetOptionsCache() { + optionsCache = new WeakMap(); + } +} + +exports.BufferedWorkerPool = BufferedWorkerPool; diff --git a/lib/growl.js b/lib/nodejs/growl.js similarity index 96% rename from lib/growl.js rename to lib/nodejs/growl.js index 53164563bb..6664d67c67 100644 --- a/lib/growl.js +++ b/lib/nodejs/growl.js @@ -8,7 +8,8 @@ const os = require('os'); const path = require('path'); const {sync: which} = require('which'); -const {EVENT_RUN_END} = require('./runner').constants; +const {EVENT_RUN_END} = require('../runner').constants; +const {isBrowser} = require('../utils'); /** * @summary @@ -25,7 +26,7 @@ const {EVENT_RUN_END} = require('./runner').constants; * @return {boolean} whether Growl notification support can be expected */ exports.isCapable = () => { - if (!process.browser) { + if (!isBrowser()) { return getSupportBinaries().reduce( (acc, binary) => acc || Boolean(which(binary, {nothrow: true})), false diff --git a/lib/nodejs/parallel-buffered-runner.js b/lib/nodejs/parallel-buffered-runner.js new file mode 100644 index 0000000000..d31929804a --- /dev/null +++ b/lib/nodejs/parallel-buffered-runner.js @@ -0,0 +1,293 @@ +/** + * A test Runner that uses a {@link module:buffered-worker-pool}. 
+ * @module parallel-buffered-runner + * @private + */ + +'use strict'; + +const allSettled = require('promise.allsettled'); +const Runner = require('../runner'); +const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; +const debug = require('debug')('mocha:parallel:parallel-buffered-runner'); +const {BufferedWorkerPool} = require('./buffered-worker-pool'); +const {setInterval, clearInterval} = global; +const {createMap} = require('../utils'); + +/** + * Outputs a debug statement with worker stats + * @param {BufferedWorkerPool} pool - Worker pool + */ +/* istanbul ignore next */ +const debugStats = pool => { + const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats(); + debug( + '%d/%d busy workers; %d idle; %d tasks queued', + busyWorkers, + totalWorkers, + idleWorkers, + pendingTasks + ); +}; + +/** + * The interval at which we will display stats for worker processes in debug mode + */ +const DEBUG_STATS_INTERVAL = 5000; + +const ABORTED = 'ABORTED'; +const IDLE = 'IDLE'; +const ABORTING = 'ABORTING'; +const RUNNING = 'RUNNING'; +const BAILING = 'BAILING'; +const BAILED = 'BAILED'; +const COMPLETE = 'COMPLETE'; + +const states = createMap({ + [IDLE]: new Set([RUNNING, ABORTING]), + [RUNNING]: new Set([COMPLETE, BAILING, ABORTING]), + [COMPLETE]: new Set(), + [ABORTED]: new Set(), + [ABORTING]: new Set([ABORTED]), + [BAILING]: new Set([BAILED, ABORTING]), + [BAILED]: new Set([COMPLETE, ABORTING]) +}); + +/** + * This `Runner` delegates tests runs to worker threads. Does not execute any + * {@link Runnable}s by itself! 
+ * @private + */ +class ParallelBufferedRunner extends Runner { + constructor(...args) { + super(...args); + + let state = IDLE; + Object.defineProperty(this, '_state', { + get() { + return state; + }, + set(newState) { + if (states[state].has(newState)) { + state = newState; + } else { + throw new Error(`invalid state transition: ${state} => ${newState}`); + } + } + }); + + this.once(Runner.constants.EVENT_RUN_END, () => { + this._state = COMPLETE; + }); + } + + /** + * Returns a mapping function to enqueue a file in the worker pool and return results of its execution. + * @param {BufferedWorkerPool} pool - Worker pool + * @param {Options} options - Mocha options + * @returns {FileRunner} Mapping function + */ + _createFileRunner(pool, options) { + return async file => { + debug('run(): enqueueing test file %s', file); + try { + const {failureCount, events} = await pool.run(file, options); + if (this._state === BAILED) { + // short-circuit after a graceful bail. if this happens, + // some other worker has bailed. + // TODO: determine if this is the desired behavior, or if we + // should report the events of this run anyway. + return; + } + debug( + 'run(): completed run of file %s; %d failures / %d events', + file, + failureCount, + events.length + ); + this.failures += failureCount; // can this ever be non-numeric? 
+ let event = events.shift(); + while (event) { + this.emit(event.eventName, event.data, event.error); + if ( + this._state !== BAILING && + event.data && + event.data._bail && + (failureCount || event.error) + ) { + debug('run(): nonzero failure count & found bail flag'); + // we need to let the events complete for this file, as the worker + // should run any cleanup hooks + this._state = BAILING; + } + event = events.shift(); + } + if (this._state === BAILING) { + debug('run(): terminating pool due to "bail" flag'); + this._state = BAILED; + await pool.terminate(); + } + } catch (err) { + if (this._state === BAILED || this._state === ABORTING) { + debug( + 'run(): worker pool terminated with intent; skipping file %s', + file + ); + } else { + // this is an uncaught exception + debug('run(): encountered uncaught exception: %O', err); + if (this.allowUncaught) { + // still have to clean up + this._state = ABORTING; + await pool.terminate(true); + } + throw err; + } + } finally { + debug('run(): done running file %s', file); + } + }; + } + + /** + * Listen on `Process.SIGINT`; terminate pool if caught. + * Returns the listener for later call to `process.removeListener()`. + * @param {BufferedWorkerPool} pool - Worker pool + * @returns {SigIntListener} Listener + */ + _bindSigIntListener(pool) { + const sigIntListener = async () => { + debug('run(): caught a SIGINT'); + this._state = ABORTING; + + try { + debug('run(): force-terminating worker pool'); + await pool.terminate(true); + } catch (err) { + console.error( + `Error while attempting to force-terminate worker pool: ${err}` + ); + process.exitCode = 1; + } finally { + process.nextTick(() => { + debug('run(): imminent death'); + this._state = ABORTED; + process.kill(process.pid, 'SIGINT'); + }); + } + }; + + process.once('SIGINT', sigIntListener); + + return sigIntListener; + } + + /** + * Runs Mocha tests by creating a thread pool, then delegating work to the + * worker threads. 
+ * + * Each worker receives one file, and as workers become available, they take a + * file from the queue and run it. The worker thread execution is treated like + * an RPC--it returns a `Promise` containing serialized information about the + * run. The information is processed as it's received, and emitted to a + * {@link Reporter}, which is likely listening for these events. + * + * @param {Function} callback - Called with an exit code corresponding to + * number of test failures. + * @param {{files: string[], options: Options}} opts - Files to run and + * command-line options, respectively. + */ + run(callback, {files, options} = {}) { + /** + * Listener on `Process.SIGINT` which tries to cleanly terminate the worker pool. + */ + let sigIntListener; + // This function should _not_ return a `Promise`; its parent (`Runner#run`) + // returns this instance, so this should do the same. However, we want to make + // use of `async`/`await`, so we use this IIFE. + + (async () => { + /** + * This is an interval that outputs stats about the worker pool every so often + */ + let debugInterval; + + /** + * @type {BufferedWorkerPool} + */ + let pool; + + try { + pool = BufferedWorkerPool.create({maxWorkers: options.jobs}); + + sigIntListener = this._bindSigIntListener(pool); + + /* istanbul ignore next */ + debugInterval = setInterval( + () => debugStats(pool), + DEBUG_STATS_INTERVAL + ).unref(); + + // this is set for uncaught exception handling in `Runner#uncaught` + // TODO: `Runner` should be using a state machine instead. + this.started = true; + this._state = RUNNING; + + this.emit(EVENT_RUN_BEGIN); + + const results = await allSettled( + files.map(this._createFileRunner(pool, options)) + ); + + // note that pool may already be terminated due to --bail + await pool.terminate(); + + results + .filter(({status}) => status === 'rejected') + .forEach(({reason}) => { + if (this.allowUncaught) { + // yep, just the first one. 
+ throw reason; + } + // "rejected" will correspond to uncaught exceptions. + // unlike the serial runner, the parallel runner can always recover. + this.uncaught(reason); + }); + + if (this._state === ABORTING) { + return; + } + this.emit(EVENT_RUN_END); + debug('run(): completing with failure count %d', this.failures); + callback(this.failures); + } catch (err) { + // this `nextTick` takes us out of the `Promise` scope, so the + // exception will not be caught and returned as a rejected `Promise`, + // which would lead to an `unhandledRejection` event. + process.nextTick(() => { + debug('run(): re-throwing uncaught exception'); + throw err; + }); + } finally { + clearInterval(debugInterval); + process.removeListener('SIGINT', sigIntListener); + } + })(); + return this; + } +} + +module.exports = ParallelBufferedRunner; + +/** + * Listener function intended to be bound to `Process.SIGINT` event + * @callback SigIntListener + * @returns {Promise} + */ + +/** + * A function accepting a test file path and returning the results of a test run + * @callback FileRunner + * @param {string} filename - File to run + * @returns {Promise} + */ diff --git a/lib/nodejs/reporters/parallel-buffered.js b/lib/nodejs/reporters/parallel-buffered.js new file mode 100644 index 0000000000..3fc8b15491 --- /dev/null +++ b/lib/nodejs/reporters/parallel-buffered.js @@ -0,0 +1,133 @@ +/** + * "Buffered" reporter used internally by a worker process when running in parallel mode. + * @module reporters/parallel-buffered + * @private + */ + +'use strict'; + +/** + * Module dependencies. 
+ */ + +const { + EVENT_SUITE_BEGIN, + EVENT_SUITE_END, + EVENT_TEST_FAIL, + EVENT_TEST_PASS, + EVENT_TEST_PENDING, + EVENT_TEST_BEGIN, + EVENT_TEST_END, + EVENT_TEST_RETRY, + EVENT_DELAY_BEGIN, + EVENT_DELAY_END, + EVENT_HOOK_BEGIN, + EVENT_HOOK_END, + EVENT_RUN_END +} = require('../../runner').constants; +const {SerializableEvent, SerializableWorkerResult} = require('../serializer'); +const debug = require('debug')('mocha:reporters:buffered'); +const Base = require('../../reporters/base'); + +/** + * List of events to listen to; these will be buffered and sent + * when `Mocha#run` is complete (via {@link ParallelBuffered#done}). + */ +const EVENT_NAMES = [ + EVENT_SUITE_BEGIN, + EVENT_SUITE_END, + EVENT_TEST_BEGIN, + EVENT_TEST_PENDING, + EVENT_TEST_FAIL, + EVENT_TEST_PASS, + EVENT_TEST_RETRY, + EVENT_TEST_END, + EVENT_HOOK_BEGIN, + EVENT_HOOK_END +]; + +/** + * Like {@link EVENT_NAMES}, except we expect these events to only be emitted + * by the `Runner` once. + */ +const ONCE_EVENT_NAMES = [EVENT_DELAY_BEGIN, EVENT_DELAY_END]; + +/** + * The `ParallelBuffered` reporter is for use by concurrent runs. Instead of outputting + * to `STDOUT`, etc., it retains a list of events it receives and hands these + * off to the callback passed into {@link Mocha#run}. That callback will then + * return the data to the main process. + * @private + */ +class ParallelBuffered extends Base { + /** + * Listens for {@link Runner} events and retains them in an `events` instance prop. + * @param {Runner} runner + */ + constructor(runner, opts) { + super(runner, opts); + + /** + * Retained list of events emitted from the {@link Runner} instance. + * @type {BufferedEvent[]} + * @memberOf Buffered + */ + const events = (this.events = []); + + /** + * mapping of event names to listener functions we've created, + * so we can cleanly _remove_ them from the runner once it's completed. 
+ */ + const listeners = new Map(); + + /** + * Creates a listener for event `eventName` and adds it to the `listeners` + * map. This is a defensive measure, so that we don't a) leak memory or b) + * remove _other_ listeners that may not be associated with this reporter. + * @param {string} eventName - Event name + */ + const createListener = eventName => + listeners + .set(eventName, (runnable, err) => { + events.push(SerializableEvent.create(eventName, runnable, err)); + }) + .get(eventName); + + EVENT_NAMES.forEach(evt => { + runner.on(evt, createListener(evt)); + }); + ONCE_EVENT_NAMES.forEach(evt => { + runner.once(evt, createListener(evt)); + }); + + runner.once(EVENT_RUN_END, () => { + debug('received EVENT_RUN_END'); + listeners.forEach((listener, evt) => { + runner.removeListener(evt, listener); + listeners.delete(evt); + }); + }); + } + + /** + * Calls the {@link Mocha#run} callback (`callback`) with the test failure + * count and the array of {@link BufferedEvent} objects. Resets the array. + * @param {number} failures - Number of failed tests + * @param {Function} callback - The callback passed to {@link Mocha#run}. + */ + done(failures, callback) { + callback(SerializableWorkerResult.create(this.events, failures)); + this.events = []; // defensive + } +} + +/** + * Serializable event data from a `Runner`. Keys of the `data` property + * beginning with `__` will be converted into a function which returns the value + * upon deserialization. + * @typedef {Object} BufferedEvent + * @property {string} name - Event name + * @property {object} data - Event parameters + */ + +module.exports = ParallelBuffered; diff --git a/lib/nodejs/serializer.js b/lib/nodejs/serializer.js new file mode 100644 index 0000000000..dae366ce79 --- /dev/null +++ b/lib/nodejs/serializer.js @@ -0,0 +1,402 @@ +/** + * Serialization/deserialization classes and functions for communication between a main Mocha process and worker processes. 
+ * @module serializer + * @private + */ + +'use strict'; + +const {type} = require('../utils'); +const {createInvalidArgumentTypeError} = require('../errors'); +// this is not named `mocha:parallel:serializer` because it's noisy and it's +// helpful to be able to write `DEBUG=mocha:parallel*` and get everything else. +const debug = require('debug')('mocha:serializer'); + +const SERIALIZABLE_RESULT_NAME = 'SerializableWorkerResult'; +const SERIALIZABLE_TYPES = new Set(['object', 'array', 'function', 'error']); + +/** + * The serializable result of a test file run from a worker. + * @private + */ +class SerializableWorkerResult { + /** + * Creates instance props; of note, the `__type` prop. + * + * Note that the failure count is _redundant_ and could be derived from the + * list of events; but since we're already doing the work, might as well use + * it. + * @param {SerializableEvent[]} [events=[]] - Events to eventually serialize + * @param {number} [failureCount=0] - Failure count + */ + constructor(events = [], failureCount = 0) { + /** + * The number of failures in this run + * @type {number} + */ + this.failureCount = failureCount; + /** + * All relevant events emitted from the {@link Runner}. + * @type {SerializableEvent[]} + */ + this.events = events; + + /** + * Symbol-like value needed to distinguish when attempting to deserialize + * this object (once it's been received over IPC). + * @type {Readonly<"SerializableWorkerResult">} + */ + Object.defineProperty(this, '__type', { + value: SERIALIZABLE_RESULT_NAME, + enumerable: true, + writable: false + }); + } + + /** + * Instantiates a new {@link SerializableWorkerResult}. + * @param {...any} args - Args to constructor + * @returns {SerilizableWorkerResult} + */ + static create(...args) { + return new SerializableWorkerResult(...args); + } + + /** + * Serializes each {@link SerializableEvent} in our `events` prop; + * makes this object read-only. 
+ * @returns {Readonly} + */ + serialize() { + this.events.forEach(event => { + event.serialize(); + }); + return Object.freeze(this); + } + + /** + * Deserializes a {@link SerializedWorkerResult} into something reporters can + * use; calls {@link SerializableEvent.deserialize} on each item in its + * `events` prop. + * @param {SerializedWorkerResult} obj + * @returns {SerializedWorkerResult} + */ + static deserialize(obj) { + obj.events.forEach(event => { + SerializableEvent.deserialize(event); + }); + return obj; + } + + /** + * Returns `true` if this is a {@link SerializedWorkerResult} or a + * {@link SerializableWorkerResult}. + * @param {*} value - A value to check + * @returns {boolean} If true, it's deserializable + */ + static isSerializedWorkerResult(value) { + return ( + value instanceof SerializableWorkerResult || + (type(value) === 'object' && value.__type === SERIALIZABLE_RESULT_NAME) + ); + } +} + +/** + * Represents an event, emitted by a {@link Runner}, which is to be transmitted + * over IPC. + * + * Due to the contents of the event data, it's not possible to send them + * verbatim. When received by the main process--and handled by reporters--these + * objects are expected to contain {@link Runnable} instances. This class + * provides facilities to perform the translation via serialization and + * deserialization. + * @private + */ +class SerializableEvent { + /** + * Constructs a `SerializableEvent`, throwing if we receive unexpected data. + * + * Practically, events emitted from `Runner` have a minumum of zero (0) + * arguments-- (for example, {@link Runnable.constants.EVENT_RUN_BEGIN}) and a + * maximum of two (2) (for example, + * {@link Runnable.constants.EVENT_TEST_FAIL}, where the second argument is an + * `Error`). The first argument, if present, is a {@link Runnable}. This + * constructor's arguments adhere to this convention. + * @param {string} eventName - A non-empty event name. + * @param {any} [originalValue] - Some data. 
Corresponds to extra arguments + * passed to `EventEmitter#emit`. + * @param {Error} [originalError] - An error, if there's an error. + * @throws If `eventName` is empty, or `originalValue` is a non-object. + */ + constructor(eventName, originalValue, originalError) { + if (!eventName) { + throw new Error('expected a non-empty `eventName` string argument'); + } + /** + * The event name. + * @memberof SerializableEvent + */ + this.eventName = eventName; + const originalValueType = type(originalValue); + if (originalValueType !== 'object' && originalValueType !== 'undefined') { + throw new Error( + `expected object, received [${originalValueType}]: ${originalValue}` + ); + } + /** + * An error, if present. + * @memberof SerializableEvent + */ + Object.defineProperty(this, 'originalError', { + value: originalError, + enumerable: false + }); + + /** + * The raw value. + * + * We don't want this value sent via IPC; making it non-enumerable will do that. + * + * @memberof SerializableEvent + */ + Object.defineProperty(this, 'originalValue', { + value: originalValue, + enumerable: false + }); + } + + /** + * In case you hated using `new` (I do). + * + * @param {...any} args - Args for {@link SerializableEvent#constructor}. + * @returns {SerializableEvent} A new `SerializableEvent` + */ + static create(...args) { + return new SerializableEvent(...args); + } + + /** + * Used internally by {@link SerializableEvent#serialize}. + * @ignore + * @param {Array} pairs - List of parent/key tuples to process; modified in-place. 
This JSDoc type is an approximation + * @param {object} parent - Some parent object + * @param {string} key - Key to inspect + * @param {WeakSet} seenObjects - For avoiding circular references + */ + static _serialize(pairs, parent, key, seenObjects) { + let value = parent[key]; + if (seenObjects.has(value)) { + parent[key] = Object.create(null); + return; + } + if (type(value) === 'error' || value instanceof Error) { + // we need to reference the stack prop b/c it's lazily-loaded. + // `__type` is necessary for deserialization to create an `Error` later. + // `message` is apparently not enumerable, so we must handle it specifically. + value = Object.assign(Object.create(null), value, { + stack: value.stack, + message: value.message, + __type: 'Error' + }); + parent[key] = value; + // after this, the result of type(value) will be `object`, and we'll throw + // whatever other junk is in the original error into the new `value`. + } + switch (type(value)) { + case 'object': + if (type(value.serialize) === 'function') { + parent[key] = value.serialize(); + } else { + // by adding props to the `pairs` array, we will process it further + pairs.push( + ...Object.keys(value) + .filter(key => SERIALIZABLE_TYPES.has(type(value[key]))) + .map(key => [value, key]) + ); + } + break; + case 'function': + // we _may_ want to dig in to functions for some assertion libraries + // that might put a usable property on a function. + // for now, just zap it. + delete parent[key]; + break; + case 'array': + pairs.push( + ...value + .filter(value => SERIALIZABLE_TYPES.has(type(value))) + .map((value, index) => [value, index]) + ); + break; + } + } + + /** + * Modifies this object *in place* (for theoretical memory consumption & + * performance reasons); serializes `SerializableEvent#originalValue` (placing + * the result in `SerializableEvent#data`) and `SerializableEvent#error`. + * Freezes this object. The result is an object that can be transmitted over + * IPC. 
+ * If this quickly becomes unmaintainable, we will want to move towards immutable + * objects post-haste. + */ + serialize() { + // given a parent object and a key, inspect the value and decide whether + // to replace it, remove it, or add it to our `pairs` array to further process. + // this is recursion in loop form. + const originalValue = this.originalValue; + const result = Object.assign(Object.create(null), { + data: + type(originalValue) === 'object' && + type(originalValue.serialize) === 'function' + ? originalValue.serialize() + : originalValue, + error: this.originalError + }); + + const pairs = Object.keys(result).map(key => [result, key]); + const seenObjects = new WeakSet(); + + let pair; + while ((pair = pairs.shift())) { + SerializableEvent._serialize(pairs, ...pair, seenObjects); + seenObjects.add(pair[0]); + } + + this.data = result.data; + this.error = result.error; + + return Object.freeze(this); + } + + /** + * Used internally by {@link SerializableEvent.deserialize}; creates an `Error` + * from an `Error`-like (serialized) object + * @ignore + * @param {Object} value - An Error-like value + * @returns {Error} Real error + */ + static _deserializeError(value) { + const error = new Error(value.message); + error.stack = value.stack; + Object.assign(error, value); + delete error.__type; + return error; + } + + /** + * Used internally by {@link SerializableEvent.deserialize}; recursively + * deserializes an object in-place. + * @param {object|Array} parent - Some object or array + * @param {string|number} key - Some prop name or array index within `parent` + */ + static _deserializeObject(parent, key) { + if (key === '__proto__') { + delete parent[key]; + return; + } + const value = parent[key]; + // keys beginning with `$$` are converted into functions returning the value + // and renamed, stripping the `$$` prefix. + // functions defined this way cannot be array members! 
+ if (type(key) === 'string' && key.startsWith('$$')) { + const newKey = key.slice(2); + parent[newKey] = () => value; + delete parent[key]; + key = newKey; + } + if (type(value) === 'array') { + value.forEach((_, idx) => { + SerializableEvent._deserializeObject(value, idx); + }); + } else if (type(value) === 'object') { + if (value.__type === 'Error') { + parent[key] = SerializableEvent._deserializeError(value); + } else { + Object.keys(value).forEach(key => { + SerializableEvent._deserializeObject(value, key); + }); + } + } + } + + /** + * Deserialize value returned from a worker into something more useful. + * Does not return the same object. + * @todo do this in a loop instead of with recursion (if necessary) + * @param {SerializedEvent} obj - Object returned from worker + * @returns {SerializedEvent} Deserialized result + */ + static deserialize(obj) { + if (!obj) { + throw createInvalidArgumentTypeError('Expected value', obj); + } + + obj = Object.assign(Object.create(null), obj); + + if (obj.data) { + Object.keys(obj.data).forEach(key => { + SerializableEvent._deserializeObject(obj.data, key); + }); + } + + if (obj.error) { + obj.error = SerializableEvent._deserializeError(obj.error); + } + + return obj; + } +} + +/** + * "Serializes" a value for transmission over IPC as a message. + * + * If value is an object and has a `serialize()` method, call that method; otherwise return the object and hope for the best. + * + * @param {*} [value] - A value to serialize + */ +exports.serialize = function serialize(value) { + const result = + type(value) === 'object' && type(value.serialize) === 'function' + ? value.serialize() + : value; + debug('serialized: %O', result); + return result; +}; + +/** + * "Deserializes" a "message" received over IPC. + * + * This could be expanded with other objects that need deserialization, + * but at present time we only care about {@link SerializableWorkerResult} objects. 
+ * + * @param {*} [value] - A "message" to deserialize + */ +exports.deserialize = function deserialize(value) { + const result = SerializableWorkerResult.isSerializedWorkerResult(value) + ? SerializableWorkerResult.deserialize(value) + : value; + debug('deserialized: %O', result); + return result; +}; + +exports.SerializableEvent = SerializableEvent; +exports.SerializableWorkerResult = SerializableWorkerResult; + +/** + * The result of calling `SerializableEvent.serialize`, as received + * by the deserializer. + * @typedef {Object} SerializedEvent + * @property {object?} data - Optional serialized data + * @property {object?} error - Optional serialized `Error` + */ + +/** + * The result of calling `SerializableWorkerResult.serialize` as received + * by the deserializer. + * @typedef {Object} SerializedWorkerResult + * @property {number} failureCount - Number of failures + * @property {SerializedEvent[]} events - Serialized events + * @property {"SerializedWorkerResult"} __type - Symbol-like to denote the type of object this is + */ diff --git a/lib/nodejs/worker.js b/lib/nodejs/worker.js new file mode 100644 index 0000000000..81abb6bb15 --- /dev/null +++ b/lib/nodejs/worker.js @@ -0,0 +1,154 @@ +/** + * A worker process. Consumes {@link module:reporters/parallel-buffered} reporter. 
+ * @module worker + * @private + */ + +'use strict'; + +const { + createInvalidArgumentTypeError, + createInvalidArgumentValueError +} = require('../errors'); +const workerpool = require('workerpool'); +const Mocha = require('../mocha'); +const { + handleRequires, + validatePlugin, + loadRootHooks +} = require('../cli/run-helpers'); +const d = require('debug'); +const debug = d.debug(`mocha:parallel:worker:${process.pid}`); +const isDebugEnabled = d.enabled(`mocha:parallel:worker:${process.pid}`); +const {serialize} = require('./serializer'); +const {setInterval, clearInterval} = global; + +const BUFFERED_REPORTER_PATH = require.resolve('./reporters/parallel-buffered'); + +let rootHooks; + +if (workerpool.isMainThread) { + throw new Error( + 'This script is intended to be run as a worker (by the `workerpool` package).' + ); +} + +/** + * Initializes some stuff on the first call to {@link run}. + * + * Handles `--require` and `--ui`. Does _not_ handle `--reporter`, + * as only the `Buffered` reporter is used. + * + * **This function only runs once per worker**; it overwrites itself with a no-op + * before returning. + * + * @param {Options} argv - Command-line options + */ +let bootstrap = async argv => { + const rawRootHooks = await handleRequires(argv.require); + rootHooks = await loadRootHooks(rawRootHooks); + validatePlugin(argv, 'ui', Mocha.interfaces); + bootstrap = () => {}; + debug('bootstrap(): finished with args: %O', argv); +}; + +/** + * Runs a single test file in a worker thread. + * @param {string} filepath - Filepath of test file + * @param {string} [serializedOptions] - **Serialized** options. This string will be eval'd! + * @see https://npm.im/serialize-javascript + * @returns {Promise<{failures: number, events: BufferedEvent[]}>} - Test + * failure count and list of events. 
+ */ +async function run(filepath, serializedOptions = '{}') { + if (!filepath) { + throw createInvalidArgumentTypeError( + 'Expected a non-empty "filepath" argument', + 'file', + 'string' + ); + } + + debug('run(): running test file %s', filepath); + + if (typeof serializedOptions !== 'string') { + throw createInvalidArgumentTypeError( + 'run() expects second parameter to be a string which was serialized by the `serialize-javascript` module', + 'serializedOptions', + 'string' + ); + } + let argv; + try { + // eslint-disable-next-line no-eval + argv = eval('(' + serializedOptions + ')'); + } catch (err) { + throw createInvalidArgumentValueError( + 'run() was unable to deserialize the options', + 'serializedOptions', + serializedOptions + ); + } + + const opts = Object.assign({ui: 'bdd'}, argv, { + // workers only use the `Buffered` reporter. + reporter: BUFFERED_REPORTER_PATH, + // if this was true, it would cause infinite recursion. + parallel: false, + // this doesn't work in parallel mode + forbidOnly: true, + // it's useful for a Mocha instance to know if it's running in a worker process. + isWorker: true + }); + + await bootstrap(opts); + + opts.rootHooks = rootHooks; + + const mocha = new Mocha(opts).addFile(filepath); + + try { + await mocha.loadFilesAsync(); + } catch (err) { + debug('run(): could not load file %s: %s', filepath, err); + throw err; + } + + return new Promise((resolve, reject) => { + let debugInterval; + /* istanbul ignore next */ + if (isDebugEnabled) { + debugInterval = setInterval(() => { + debug('run(): still running %s...', filepath); + }, 5000).unref(); + } + mocha.run(result => { + // Runner adds these; if we don't remove them, we'll get a leak. + process.removeAllListeners('uncaughtException'); + + try { + const serialized = serialize(result); + debug( + 'run(): completed run with %d test failures; returning to main process', + typeof result.failures === 'number' ? 
result.failures : 0 + ); + resolve(serialized); + } catch (err) { + // TODO: figure out exactly what the sad path looks like here. + // rejection should only happen if an error is "unrecoverable" + debug('run(): serialization failed; rejecting: %O', err); + reject(err); + } finally { + clearInterval(debugInterval); + } + }); + }); +} + +// this registers the `run` function. +workerpool.worker({run}); + +debug('started worker process'); + +// for testing +exports.run = run; diff --git a/lib/reporters/base.js b/lib/reporters/base.js index ea259445e3..cdf77a2847 100644 --- a/lib/reporters/base.js +++ b/lib/reporters/base.js @@ -10,7 +10,7 @@ var tty = require('tty'); var diff = require('diff'); var milliseconds = require('ms'); var utils = require('../utils'); -var supportsColor = process.browser ? null : require('supports-color'); +var supportsColor = utils.isBrowser() ? null : require('supports-color'); var constants = require('../runner').constants; var EVENT_TEST_PASS = constants.EVENT_TEST_PASS; var EVENT_TEST_FAIL = constants.EVENT_TEST_FAIL; @@ -37,7 +37,7 @@ var consoleLog = console.log; */ exports.useColors = - !process.browser && + !utils.isBrowser() && (supportsColor.stdout || process.env.MOCHA_COLORS !== undefined); /** diff --git a/lib/reporters/landing.js b/lib/reporters/landing.js index 0be3837d7b..03c9acb1af 100644 --- a/lib/reporters/landing.js +++ b/lib/reporters/landing.js @@ -56,11 +56,12 @@ function Landing(runner, options) { var self = this; var width = (Base.window.width * 0.75) | 0; - var total = runner.total; var stream = process.stdout; + var plane = color('plane', '✈'); var crashed = -1; var n = 0; + var total = 0; function runway() { var buf = Array(width).join('-'); @@ -74,8 +75,7 @@ function Landing(runner, options) { runner.on(EVENT_TEST_END, function(test) { // check if the plane crashed - var col = crashed === -1 ? ((width * ++n) / total) | 0 : crashed; - + var col = crashed === -1 ? 
((width * ++n) / ++total) | 0 : crashed; // show the crash if (test.state === STATE_FAILED) { plane = color('plane crash', '✈'); diff --git a/lib/reporters/tap.js b/lib/reporters/tap.js index 12257a745f..aa79fb19f1 100644 --- a/lib/reporters/tap.js +++ b/lib/reporters/tap.js @@ -50,9 +50,7 @@ function TAP(runner, options) { this._producer = createProducer(tapVersion); runner.once(EVENT_RUN_BEGIN, function() { - var ntests = runner.grepTotal(runner.suite); self._producer.writeVersion(); - self._producer.writePlan(ntests); }); runner.on(EVENT_TEST_END, function() { @@ -204,6 +202,7 @@ TAPProducer.prototype.writeEpilogue = function(stats) { println('# pass ' + stats.passes); // :TBD: Why are we not showing pending results? println('# fail ' + stats.failures); + this.writePlan(stats.passes + stats.failures + stats.pending); }; /** diff --git a/lib/runner.js b/lib/runner.js index 3c857294f7..22e7bb91d6 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -194,6 +194,7 @@ inherits(Runner, EventEmitter); * @param {EventEmitter} target - The `EventEmitter` * @param {string} eventName - The event name * @param {string} fn - Listener function + * @private */ Runner.prototype._addEventListener = function(target, eventName, listener) { target.on(eventName, listener); @@ -205,6 +206,7 @@ Runner.prototype._addEventListener = function(target, eventName, listener) { * @param {EventEmitter} target - The `EventEmitter` * @param {string} eventName - The event anme * @param {function} listener - Listener function + * @private */ Runner.prototype._removeEventListener = function(target, eventName, listener) { var eventListenerIndex = -1; @@ -898,7 +900,7 @@ Runner.prototype._uncaught = function(err) { return; } // browser does not exit script when throwing in global.onerror() - if (this.allowUncaught && !process.browser) { + if (this.allowUncaught && !utils.isBrowser()) { debug('uncaught(): bubbling exception due to --allow-uncaught'); throw err; } @@ -976,10 +978,11 @@ 
Runner.prototype._uncaught = function(err) { * * @public * @memberof Runner - * @param {Function} fn + * @param {Function} fn - Callback when finished + * @param {{files: string[], options: Options}} [opts] - For subclasses * @return {Runner} Runner instance. */ -Runner.prototype.run = function(fn) { +Runner.prototype.run = function(fn, opts) { var self = this; var rootSuite = this.suite; diff --git a/lib/suite.js b/lib/suite.js index dc42fd74fd..f3c8b104af 100644 --- a/lib/suite.js +++ b/lib/suite.js @@ -557,6 +557,22 @@ Suite.prototype.cleanReferences = function cleanReferences() { } }; +/** + * Returns an object suitable for IPC. + * Functions are represented by keys beginning with `$$`. + * @private + * @returns {Object} + */ +Suite.prototype.serialize = function serialize() { + return { + _bail: this._bail, + $$fullTitle: this.fullTitle(), + $$isPending: this.isPending(), + root: this.root, + title: this.title + }; +}; + var constants = utils.defineConstants( /** * {@link Suite}-related constants. diff --git a/lib/test.js b/lib/test.js index 187fe49767..3fb3e57a4e 100644 --- a/lib/test.js +++ b/lib/test.js @@ -79,3 +79,31 @@ Test.prototype.clone = function() { test.ctx = this.ctx; return test; }; + +/** + * Returns an minimal object suitable for transmission over IPC. + * Functions are represented by keys beginning with `$$`. 
+ * @private + * @returns {Object} + */ +Test.prototype.serialize = function serialize() { + return { + $$currentRetry: this._currentRetry, + $$fullTitle: this.fullTitle(), + $$isPending: this.pending, + $$retriedTest: this._retriedTest || null, + $$slow: this._slow, + $$titlePath: this.titlePath(), + body: this.body, + duration: this.duration, + err: this.err, + parent: { + $$fullTitle: this.parent.fullTitle() + }, + speed: this.speed, + state: this.state, + title: this.title, + type: this.type, + file: this.file + }; +}; diff --git a/lib/utils.js b/lib/utils.js index 0134247022..b338bc094c 100644 --- a/lib/utils.js +++ b/lib/utils.js @@ -488,7 +488,6 @@ function isHiddenOnUnix(pathname) { * **Make no assumption that the names will be sorted in any fashion.** * * @public - * @memberof Mocha.utils * @param {string} filepath - Base path to start searching from. * @param {string[]} [extensions=[]] - File extensions to look for. * @param {boolean} [recursive=false] - Whether to recurse into subdirectories. @@ -812,7 +811,7 @@ exports.defineConstants = function(obj) { * @returns {Boolean} whether the current version of Node.JS supports ES Modules in a way that is compatible with Mocha */ exports.supportsEsModules = function() { - if (!process.browser && process.versions && process.versions.node) { + if (!exports.isBrowser() && process.versions && process.versions.node) { var versionFields = process.versions.node.split('.'); var major = +versionFields[0]; var minor = +versionFields[1]; @@ -832,3 +831,13 @@ exports.supportsEsModules = function() { exports.cwd = function cwd() { return process.cwd(); }; + +/** + * Returns `true` if Mocha is running in a browser. + * Checks for `process.browser`. 
+ * @returns {boolean} + * @private + */ +exports.isBrowser = function isBrowser() { + return Boolean(process.browser); +}; diff --git a/package-lock.json b/package-lock.json index 78be92c17b..6902e1240a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1237,6 +1237,17 @@ "es-abstract": "^1.17.0-next.1" } }, + "array.prototype.map": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.2.tgz", + "integrity": "sha512-Az3OYxgsa1g7xDYp86l0nnN4bcmuEITGe1rbdEBVkrqkzMgDcbdQ2R7r41pNzti+4NMces3H8gMmuioZUilLgw==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "es-array-method-boxes-properly": "^1.0.0", + "is-string": "^1.0.4" + } + }, "arraybuffer.slice": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", @@ -2384,6 +2395,12 @@ "wrap-ansi": "^2.0.0" } }, + "eventemitter3": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-1.2.0.tgz", + "integrity": "sha1-HIaZHYFq0eUEdQ5zh0Ik7PO+xQg=", + "dev": true + }, "extend-shallow": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", @@ -3060,6 +3077,16 @@ } } }, + "http-proxy": { + "version": "1.15.2", + "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.15.2.tgz", + "integrity": "sha1-ZC/cr/5S00SNK9o7AHnpQJBk2jE=", + "dev": true, + "requires": { + "eventemitter3": "1.x.x", + "requires-port": "1.x.x" + } + }, "is-binary-path": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", @@ -5743,7 +5770,6 @@ "version": "1.17.5", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", - "dev": true, "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", @@ -5758,11 
+5784,36 @@ "string.prototype.trimright": "^2.1.1" } }, + "es-array-method-boxes-properly": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", + "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==" + }, + "es-get-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", + "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", + "requires": { + "es-abstract": "^1.17.4", + "has-symbols": "^1.0.1", + "is-arguments": "^1.0.4", + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-string": "^1.0.5", + "isarray": "^2.0.5" + }, + "dependencies": { + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + } + } + }, "es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, "requires": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -6410,9 +6461,9 @@ } }, "eventemitter3": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-1.2.0.tgz", - "integrity": "sha1-HIaZHYFq0eUEdQ5zh0Ik7PO+xQg=", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.4.tgz", + "integrity": "sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ==", "dev": true }, "events": { @@ -7776,7 +7827,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": 
"sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, "requires": { "function-bind": "^1.1.1" } @@ -8089,13 +8139,14 @@ } }, "http-proxy": { - "version": "1.15.2", - "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.15.2.tgz", - "integrity": "sha1-ZC/cr/5S00SNK9o7AHnpQJBk2jE=", + "version": "1.18.1", + "resolved": "https://registry.npmjs.org/http-proxy/-/http-proxy-1.18.1.tgz", + "integrity": "sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==", "dev": true, "requires": { - "eventemitter3": "1.x.x", - "requires-port": "1.x.x" + "eventemitter3": "^4.0.0", + "follow-redirects": "^1.0.0", + "requires-port": "^1.0.0" } }, "http-signature": { @@ -8701,6 +8752,11 @@ "is-decimal": "^1.0.0" } }, + "is-arguments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", + "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==" + }, "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", @@ -8723,8 +8779,7 @@ "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", - "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", - "dev": true + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==" }, "is-ci": { "version": "1.2.1", @@ -8778,8 +8833,7 @@ "is-date-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", - "dev": true + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" }, "is-decimal": { 
"version": "1.0.4", @@ -8876,6 +8930,11 @@ "is-path-inside": "^1.0.0" } }, + "is-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", + "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==" + }, "is-natural-number": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", @@ -8992,7 +9051,6 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", - "dev": true, "requires": { "has": "^1.0.3" } @@ -9024,6 +9082,11 @@ "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", "dev": true }, + "is-set": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", + "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==" + }, "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", @@ -9033,8 +9096,7 @@ "is-string": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", - "dev": true + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==" }, "is-svg": { "version": "3.0.0", @@ -9049,7 +9111,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", - "dev": true, "requires": { "has-symbols": "^1.0.1" } @@ -9283,6 +9344,20 @@ "is-object": "^1.0.1" } }, + "iterate-iterator": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/iterate-iterator/-/iterate-iterator-1.0.1.tgz", + "integrity": "sha512-3Q6tudGN05kbkDQDI4CqjaBf4qf85w6W6GnuZDtUVYwKgtC1q8yxYX7CZed7N+tLzQqS6roujWvszf13T+n9aw==" + }, + "iterate-value": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/iterate-value/-/iterate-value-1.0.2.tgz", + "integrity": "sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==", + "requires": { + "es-get-iterator": "^1.0.2", + "iterate-iterator": "^1.0.1" + } + }, "javascript-stringify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.0.1.tgz", @@ -12316,8 +12391,7 @@ "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", - "dev": true + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==" }, "object-keys": { "version": "1.1.1", @@ -14072,6 +14146,18 @@ "asap": "~2.0.3" } }, + "promise.allsettled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/promise.allsettled/-/promise.allsettled-1.0.2.tgz", + "integrity": "sha512-UpcYW5S1RaNKT6pd+s9jp9K9rlQge1UXKskec0j6Mmuq7UJCvlS2J2/s/yuPN8ehftf9HXMxWlKiPbGGUzpoRg==", + "requires": { + "array.prototype.map": "^1.0.1", + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "iterate-value": "^1.0.0" + } + }, "proto-list": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", @@ -15323,6 +15409,11 @@ } } }, + "serialize-javascript": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-3.0.0.tgz", + "integrity": "sha512-skZcHYw2vEX4bw90nAr2iTTsz6x2SrHEnfxgKYmZlvJYBEZrvbKtobJWlQ20zczKb3bsHHXXTYt48zBA7ni9cw==" + }, 
"serve-index": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", @@ -16461,7 +16552,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.0.tgz", "integrity": "sha512-EEJnGqa/xNfIg05SxiPSqRS7S9qwDhYts1TSLR1BQfYUfPe1stofgGKvwERK9+9yf+PpfBMlpBaCHucXGPQfUA==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" @@ -16471,7 +16561,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16482,7 +16571,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16493,7 +16581,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.0.tgz", "integrity": "sha512-iCP8g01NFYiiBOnwG1Xc3WZLyoo+RuBymwIlWncShXDDJYWN6DbnM3odslBJdgCdRlq94B5s63NWAZlcn2CS4w==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" @@ -18849,6 +18936,11 @@ "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", "dev": true }, + "workerpool": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.0.0.tgz", + "integrity": "sha512-fU2OcNA/GVAJLLyKUoHkAgIhKb0JoCpSjLC/G2vYKxUjVmQwGbRVeoPJ1a8U4pnVofz4AQV5Y/NEw8oKqxEBtA==" + }, "wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", diff --git a/package-scripts.js b/package-scripts.js 
index 7662053795..94b0469994 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -10,10 +10,18 @@ const path = require('path'); * @returns {string} Command string to be executed by nps. */ function test(testName, mochaParams) { - const coverageCommand = `nyc --no-clean --report-dir coverage/reports/${testName}`; + let coverageCommand = `nyc --no-clean --report-dir="coverage/reports/${testName}"`; const mochaCommand = `node ${path.join('bin', 'mocha')}`; // Include 'node' and path.join for Windows compatibility - if (process.env.CI && !/^only-/.test(testName)) { - mochaParams += ' --forbid-only'; + if (process.env.CI) { + // suppress coverage summaries in CI to reduce noise + coverageCommand += ' --reporter=json'; + if (!/^only-/.test(testName)) { + mochaParams += ' --forbid-only'; + } + } + // this may _actually_ be supported in the future + if (process.env.MOCHA_PARALLEL === '0') { + mochaParams += ' --no-parallel'; } if (process.env.TRAVIS) { mochaParams += ' --color'; // force color in travis-ci @@ -26,7 +34,21 @@ function test(testName, mochaParams) { module.exports = { scripts: { build: { - script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' -o mocha.js`, + script: `browserify -e browser-entry.js \ + --plugin ./scripts/dedefine \ + --ignore 'chokidar' \ + --ignore 'fs' \ + --ignore 'glob' \ + --ignore 'path' \ + --ignore 'supports-color' \ + --ignore './lib/cli/*.js' \ + --ignore './lib/esm-utils.js' \ + --ignore './lib/nodejs/serializer.js' \ + --ignore './lib/nodejs/parallel-buffered-runner.js' \ + --ignore './lib/nodejs/reporters/parallel-buffered.js' \ + --ignore './lib/nodejs/worker.js' \ + --ignore './lib/nodejs/buffered-worker-pool.js' \ + -o mocha.js`, description: 'Build browser bundle' }, lint: { @@ -161,19 +183,25 @@ module.exports = { hiddenFromHelp: true }, bdd: { - script: 
test('only-bdd', '--ui bdd test/only/bdd.spec'), + script: test( + 'only-bdd', + '--ui bdd test/only/bdd.spec --no-parallel' + ), description: 'Run Node.js "only" w/ BDD interface tests', hiddenFromHelp: true }, tdd: { - script: test('only-tdd', '--ui tdd test/only/tdd.spec'), + script: test( + 'only-tdd', + '--ui tdd test/only/tdd.spec --no-parallel' + ), description: 'Run Node.js "only" w/ TDD interface tests', hiddenFromHelp: true }, bddRequire: { script: test( 'only-bdd-require', - '--ui qunit test/only/bdd-require.spec' + '--ui qunit test/only/bdd-require.spec --no-parallel' ), description: 'Run Node.js "only" w/ QUnit interface tests', hiddenFromHelp: true @@ -181,7 +209,7 @@ module.exports = { globalBdd: { script: test( 'only-global-bdd', - '--ui bdd test/only/global/bdd.spec' + '--ui bdd test/only/global/bdd.spec --no-parallel' ), description: 'Run Node.js "global only" w/ BDD interface tests', hiddenFromHelp: true @@ -189,7 +217,7 @@ module.exports = { globalTdd: { script: test( 'only-global-tdd', - '--ui tdd test/only/global/tdd.spec' + '--ui tdd test/only/global/tdd.spec --no-parallel' ), description: 'Run Node.js "global only" w/ TDD interface tests', hiddenFromHelp: true @@ -197,7 +225,7 @@ module.exports = { globalQunit: { script: test( 'only-global-qunit', - '--ui qunit test/only/global/qunit.spec' + '--ui qunit test/only/global/qunit.spec --no-parallel' ), description: 'Run Node.js "global only" w/ QUnit interface tests', hiddenFromHelp: true diff --git a/package.json b/package.json index 63d6f02c90..5234befd34 100644 --- a/package.json +++ b/package.json @@ -66,10 +66,13 @@ "minimatch": "3.0.4", "ms": "2.1.2", "object.assign": "4.1.0", + "promise.allsettled": "1.0.2", + "serialize-javascript": "3.0.0", "strip-json-comments": "3.0.1", "supports-color": "7.1.0", "which": "2.0.2", "wide-align": "1.1.3", + "workerpool": "6.0.0", "yargs": "13.3.2", "yargs-parser": "13.1.2", "yargs-unparser": "1.6.0" @@ -130,7 +133,7 @@ "svgo": "^1.3.2", "through2": 
"^3.0.1", "to-vfile": "^6.1.0", - "unexpected": "^11.13.0", + "unexpected": "^11.14.0", "unexpected-eventemitter": "^2.2.0", "unexpected-sinon": "^10.11.2", "uslug": "^1.0.4", @@ -152,14 +155,19 @@ }, "browser": { "./index.js": "./browser-entry.js", - "./lib/growl.js": "./lib/browser/growl.js", + "./lib/nodejs/growl.js": "./lib/browser/growl.js", "tty": "./lib/browser/tty.js", "./lib/cli/*.js": false, "chokidar": false, "fs": false, "glob": false, "path": false, - "supports-color": false + "supports-color": false, + "./lib/nodejs/serializer.js": false, + "./lib/nodejs/worker.js": false, + "./lib/nodejs/buffered-worker-pool.js": false, + "./lib/nodejs/parallel-buffered-runner.js": false, + "./lib/nodejs/reporters/parallel-buffered.js": false }, "prettier": { "singleQuote": true, diff --git a/test/integration/fixtures/options/jobs/fail-in-parallel.fixture.js b/test/integration/fixtures/options/jobs/fail-in-parallel.fixture.js new file mode 100644 index 0000000000..18eb7421bd --- /dev/null +++ b/test/integration/fixtures/options/jobs/fail-in-parallel.fixture.js @@ -0,0 +1,7 @@ +'use strict'; + +it('should fail if in a worker', function() { + if (!require('workerpool').isMainThread) { + throw new Error('in worker!'); + } +}); diff --git a/test/integration/fixtures/options/parallel/bail.fixture.js b/test/integration/fixtures/options/parallel/bail.fixture.js new file mode 100644 index 0000000000..77692d56d1 --- /dev/null +++ b/test/integration/fixtures/options/parallel/bail.fixture.js @@ -0,0 +1,9 @@ +describe('some suite', function() { + this.bail(true); + + it('should bail', function() { + throw new Error(); + }); + + it('will not get run', function() {}); +}); diff --git a/test/integration/fixtures/options/parallel/exclusive-test-a.fixture.js b/test/integration/fixtures/options/parallel/exclusive-test-a.fixture.js new file mode 100644 index 0000000000..93bf8853ce --- /dev/null +++ b/test/integration/fixtures/options/parallel/exclusive-test-a.fixture.js @@ -0,0 +1,3 @@ 
+describe.only('it should only run this, but it does not', function() { + it('should do a thing', function() {}); +}); diff --git a/test/integration/fixtures/options/parallel/exclusive-test-b.fixture.js b/test/integration/fixtures/options/parallel/exclusive-test-b.fixture.js new file mode 100644 index 0000000000..bb05dd496e --- /dev/null +++ b/test/integration/fixtures/options/parallel/exclusive-test-b.fixture.js @@ -0,0 +1,3 @@ +describe('it should run this anyway', function() { + it('should do a different thing', function() {}); +}); diff --git a/test/integration/fixtures/options/parallel/retries-a.fixture.js b/test/integration/fixtures/options/parallel/retries-a.fixture.js new file mode 100644 index 0000000000..e5d37ca561 --- /dev/null +++ b/test/integration/fixtures/options/parallel/retries-a.fixture.js @@ -0,0 +1,5 @@ +describe('retry suite A', function() { + it('should pass', function() { + + }); +}); \ No newline at end of file diff --git a/test/integration/fixtures/options/parallel/retries-b.fixture.js b/test/integration/fixtures/options/parallel/retries-b.fixture.js new file mode 100644 index 0000000000..c1f1d3f7ba --- /dev/null +++ b/test/integration/fixtures/options/parallel/retries-b.fixture.js @@ -0,0 +1,8 @@ +describe('retry suite B', function() { + let count = 0; + it('should retry', function() { + this.retries(3); + console.log(`count: ${++count}`); + throw new Error('failure'); + }); +}); \ No newline at end of file diff --git a/test/integration/fixtures/options/parallel/syntax-err.fixture.js b/test/integration/fixtures/options/parallel/syntax-err.fixture.js new file mode 100644 index 0000000000..8dd2b0f93b --- /dev/null +++ b/test/integration/fixtures/options/parallel/syntax-err.fixture.js @@ -0,0 +1 @@ +var foo = \ No newline at end of file diff --git a/test/integration/fixtures/options/parallel/test-a.fixture.js b/test/integration/fixtures/options/parallel/test-a.fixture.js new file mode 100644 index 0000000000..43f53bbda8 --- /dev/null +++ 
b/test/integration/fixtures/options/parallel/test-a.fixture.js @@ -0,0 +1,3 @@ +describe('a', function() { + it('should pass', function() {}); +}); diff --git a/test/integration/fixtures/options/parallel/test-b.fixture.js b/test/integration/fixtures/options/parallel/test-b.fixture.js new file mode 100644 index 0000000000..8e6437a56a --- /dev/null +++ b/test/integration/fixtures/options/parallel/test-b.fixture.js @@ -0,0 +1,3 @@ +describe('b', function() { + it('should be pending'); +}); diff --git a/test/integration/fixtures/options/parallel/test-c.fixture.js b/test/integration/fixtures/options/parallel/test-c.fixture.js new file mode 100644 index 0000000000..d06b6a3ee6 --- /dev/null +++ b/test/integration/fixtures/options/parallel/test-c.fixture.js @@ -0,0 +1,5 @@ +describe('c', function() { + it('should fail', function() { + throw new Error('failure'); + }); +}); diff --git a/test/integration/fixtures/options/parallel/test-d.fixture.js b/test/integration/fixtures/options/parallel/test-d.fixture.js new file mode 100644 index 0000000000..ee19d54594 --- /dev/null +++ b/test/integration/fixtures/options/parallel/test-d.fixture.js @@ -0,0 +1,7 @@ +describe('d', function() { + it('should pass, then fail', function() { + process.nextTick(function() { + throw new Error('uncaught!!'); + }); + }); +}); diff --git a/test/integration/fixtures/options/parallel/uncaught.fixture.js b/test/integration/fixtures/options/parallel/uncaught.fixture.js new file mode 100644 index 0000000000..bca1610ab7 --- /dev/null +++ b/test/integration/fixtures/options/parallel/uncaught.fixture.js @@ -0,0 +1,7 @@ +'use strict'; + +it('throws an uncaught exception', function (done) { + process.nextTick(function () { + throw new Error('existential isolation!!'); + }); +}); diff --git a/test/integration/helpers.js b/test/integration/helpers.js index 17a1acfea1..6475262443 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -241,11 +241,29 @@ function toJSONRunResult(result) { 
/** * Creates arguments loading a default fixture if none provided * + * - The `--no-color` arg is always used (color output complicates testing `STDOUT`) + * - Unless `--bail` or `--no-bail` is set, use `--no-bail`. This enables using + * `--bail` (if desired) from the command-line when running our integration + * test suites without stepping on the toes of subprocesses. + * - Unless `--parallel` or `--no-parallel` is set, use `--no-parallel`. We + * assume the test suite is _already_ running in parallel--and there's no point + * in trying to run a single test fixture in parallel. + * - The {@link DEFAULT_FIXTURE} file is used if no arguments are provided. + * * @param {string[]|*} [args] - Arguments to `spawn` * @returns string[] */ function defaultArgs(args) { - return !args || !args.length ? ['--file', DEFAULT_FIXTURE] : args; + var newArgs = (!args || !args.length ? [DEFAULT_FIXTURE] : args).concat([ + '--no-color' + ]); + if (!newArgs.some(arg => /--(no-)?bail/.test(arg))) { + newArgs.push('--no-bail'); + } + if (!newArgs.some(arg => /--(no-)?parallel/.test(arg))) { + newArgs.push('--no-parallel'); + } + return newArgs; } function invokeMocha(args, fn, opts) { @@ -374,7 +392,9 @@ function resolveFixturePath(fixture) { if (path.extname(fixture) !== '.js' && path.extname(fixture) !== '.mjs') { fixture += '.fixture.js'; } - return path.join('test', 'integration', 'fixtures', fixture); + return path.isAbsolute(fixture) + ? 
fixture + : path.join('test', 'integration', 'fixtures', fixture); } /** diff --git a/test/integration/options/jobs.spec.js b/test/integration/options/jobs.spec.js new file mode 100644 index 0000000000..832d825298 --- /dev/null +++ b/test/integration/options/jobs.spec.js @@ -0,0 +1,34 @@ +'use strict'; + +var helpers = require('../helpers'); +var runMochaAsync = helpers.runMochaAsync; + +describe('--jobs', function() { + describe('when set to a number less than 2', function() { + it('should run tests in serial', function() { + return expect( + runMochaAsync( + 'options/jobs/fail-in-parallel', + ['--parallel', '--jobs', '1'], + 'pipe' + ), + 'when fulfilled', + 'to have passed' + ); + }); + }); + + describe('when set to a number greater than 1', function() { + it('should run tests in parallel', function() { + return expect( + runMochaAsync( + 'options/jobs/fail-in-parallel', + ['--parallel', '--jobs', '2'], + 'pipe' + ), + 'when fulfilled', + 'to have failed' + ); + }); + }); +}); diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js new file mode 100644 index 0000000000..8fc5389943 --- /dev/null +++ b/test/integration/options/parallel.spec.js @@ -0,0 +1,489 @@ +'use strict'; +var Mocha = require('../../../lib/mocha'); +var path = require('path'); +var helpers = require('../helpers'); +var runMochaAsync = helpers.runMochaAsync; +var invokeMochaAsync = helpers.invokeMochaAsync; +var getSummary = helpers.getSummary; +var utils = require('../../../lib/utils'); + +function compareReporters(reporter) { + return runMochaAsync(path.join('options', 'parallel', 'test-a.fixture.js'), [ + '--reporter', + reporter, + '--no-parallel' + ]).then(function(expected) { + expected.output = expected.output.replace(/\d+m?s/g, '100ms'); + return runMochaAsync( + path.join('options', 'parallel', 'test-a.fixture.js'), + ['--reporter', reporter, '--parallel'] + ).then(function(actual) { + actual.output = actual.output.replace(/\d+m?s/g, '100ms'); 
+ return [actual, expected]; + }); + }); +} + +function runGenericReporterTest(reporter) { + return compareReporters.call(this, reporter).then(function(result) { + var expected = result.shift(); + var actual = result.shift(); + return expect(actual, 'to satisfy', { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code, + output: expected.output + }); + }); +} + +describe('--parallel', function() { + describe('when a test has a syntax error', function() { + describe('when there is only a single test file', function() { + it('should fail gracefully', function() { + return expect( + runMochaAsync('options/parallel/syntax-err', ['--parallel']), + 'when fulfilled', + 'to have failed with output', + /SyntaxError/ + ); + }); + }); + + describe('when there are multiple test files', function() { + it('should fail gracefully', function() { + return expect( + invokeMochaAsync( + [ + require.resolve( + '../fixtures/options/parallel/syntax-err.fixture.js' + ), + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to have failed' + ); + }); + }); + }); + + describe('when used with CJS tests', function() { + it('should have the same result as with --no-parallel', function() { + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--no-parallel'] + ).then(function(expected) { + return expect( + runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ + '--parallel' + ]), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); + }); + + describe('when used with ESM tests', function() { + var esmArgs = + Number(process.versions.node.split('.')[0]) >= 13 + ? 
[] + : ['--experimental-modules']; + + before(function() { + if (!utils.supportsEsModules()) this.skip(); + }); + + it('should have the same result as with --no-parallel', function() { + var glob = path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs'); + return invokeMochaAsync(esmArgs.concat('--no-parallel', glob))[1].then( + function(expected) { + expected = getSummary(expected); + return invokeMochaAsync(esmArgs.concat('--parallel', glob))[1].then( + function(actual) { + actual = getSummary(actual); + expect(actual, 'to satisfy', { + pending: expected.pending, + passing: expected.passing, + failing: expected.failing + }); + } + ); + } + ); + }); + }); + + describe('when used with --retries', function() { + it('should retry tests appropriately', function() { + return expect( + runMochaAsync( + path.join('options', 'parallel', 'retries-*.fixture.js'), + ['--parallel'] + ), + 'when fulfilled', + 'to have failed' + ) + .and('when fulfilled', 'to have passed test count', 1) + .and('when fulfilled', 'to have pending test count', 0) + .and('when fulfilled', 'to have failed test count', 1) + .and('when fulfilled', 'to contain output', /count: 3/); + }); + }); + + describe('when used with --allow-uncaught', function() { + it('should bubble up an exception', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/uncaught.fixture.js'), + '--parallel', + '--allow-uncaught' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to contain output', + /Error: existential isolation/i + ).and('when fulfilled', 'to have exit code', 1); + }); + }); + + describe('when used with --file', function() { + it('should error out', function() { + return expect( + invokeMochaAsync( + [ + '--file', + path.join('options', 'parallel', 'test-a.fixture.js'), + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to contain output', + /mutually exclusive with --file/ + ); + }); + }); + + describe('when used with --sort', function() { + it('should 
error out', function() { + return expect( + invokeMochaAsync( + [ + '--sort', + path.join( + __dirname, + '..', + 'fixtures', + 'options', + 'parallel', + 'test-*.fixture.js' + ), + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to contain output', + /mutually exclusive with --sort/ + ); + }); + }); + + describe('when used with exclusive tests', function() { + it('should error out', function() { + return expect( + invokeMochaAsync( + [ + path.join( + __dirname, + '..', + 'fixtures', + 'options', + 'parallel', + 'exclusive-test-*.fixture.js' + ), + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to contain output', + /`\.only` is not supported in parallel mode/ + ); + }); + }); + + describe('when used with --bail', function() { + it('should skip some tests', function() { + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--parallel', '--bail'] + ).then(function(result) { + // we don't know _exactly_ how many tests will be skipped here + // due to the --bail, but the number of tests completed should be + // less than the total, which is 5. 
+ return expect( + result.passing + result.pending + result.failing, + 'to be less than', + 5 + ); + }); + }); + + it('should fail', function() { + return expect( + runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ + '--parallel', + '--bail' + ]), + 'when fulfilled', + 'to have failed' + ); + }); + }); + + describe('when encountering a "bail" in context', function() { + it('should skip some tests', function() { + return runMochaAsync('options/parallel/bail', ['--parallel']).then( + function(result) { + return expect( + result.passing + result.pending + result.failing, + 'to be less than', + 2 + ); + } + ); + }); + + it('should fail', function() { + return expect( + runMochaAsync('options/parallel/bail', ['--parallel', '--bail']), + 'when fulfilled', + 'to have failed' + ); + }); + }); + + describe('when used with "grep"', function() { + it('should be equivalent to running in serial', function() { + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--no-parallel', '--grep="suite d"'] + ).then(function(expected) { + return expect( + runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ + '--parallel', + '--grep="suite d"' + ]), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); + }); + + describe('reporter equivalence', function() { + // each reporter name is duplicated; one is in all lower-case + // 'base' is abstract, 'html' is browser-only, others are incompatible + var DENY = ['progress', 'base', 'html', 'markdown', 'json-stream']; + Object.keys(Mocha.reporters) + .filter(function(name) { + return /^[a-z]/.test(name) && DENY.indexOf(name) === -1; + }) + .forEach(function(reporter) { + describe( + 'when multiple test files run with --reporter=' + reporter, + function() { + it('should have the same result as when run with --no-parallel', function() { + // note that the 
output may not be in the same order, as running file + // order is non-deterministic in parallel mode + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--reporter', reporter, '--no-parallel'] + ).then(function(expected) { + return expect( + runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--reporter', reporter, '--parallel'] + ), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); + } + ); + }); + }); + + describe('when a single test file is run with --reporter=dot', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'dot'); + }); + }); + + describe('when a single test file is run with --reporter=doc', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'doc'); + }); + }); + + describe('when a single test file is run with --reporter=tap', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'tap'); + }); + }); + + describe('when a single test file is run with --reporter=list', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'list'); + }); + }); + + describe('when a single test file is run with --reporter=min', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'min'); + }); + }); + + describe('when a single test file is run with --reporter=spec', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'spec'); + }); + }); + + describe('when a single test file is run with 
--reporter=nyan', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'nyan'); + }); + }); + + describe('when a single test file is run with --reporter=landing', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'landing'); + }); + }); + + describe('when a single test file is run with --reporter=progress', function() { + it('should fail due to incompatibility', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/test-a.fixture.js'), + '--reporter=progress', + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to have failed' + ).and('when fulfilled', 'to contain output', /mutually exclusive/); + }); + }); + + describe('when a single test file is run with --reporter=markdown', function() { + it('should fail due to incompatibility', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/test-a.fixture.js'), + '--reporter=markdown', + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to have failed' + ).and('when fulfilled', 'to contain output', /mutually exclusive/); + }); + }); + + describe('when a single test file is run with --reporter=json-stream', function() { + it('should fail due to incompatibility', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/test-a.fixture.js'), + '--reporter=json-stream', + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to have failed' + ).and('when fulfilled', 'to contain output', /mutually exclusive/); + }); + }); + + describe('when a single test file is run with --reporter=json', function() { + it('should have the same output as when run with --no-parallel', function() { + // this one has some timings/durations that we can safely ignore + return compareReporters.call(this, 
'json').then(function(result) { + var expected = result.shift(); + expected.output = JSON.parse(expected.output); + var actual = result.shift(); + actual.output = JSON.parse(actual.output); + return expect(actual, 'to satisfy', { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code, + output: { + stats: { + suites: expected.output.stats.suites, + tests: expected.output.stats.tests, + passes: expected.output.stats.passes, + pending: expected.output.stats.pending, + failures: expected.output.stats.failures + }, + tests: expected.tests + } + }); + }); + }); + }); + + describe('when a single test file is run with --reporter=xunit', function() { + it('should have the same output as when run with --no-parallel', function() { + // durations need replacing + return compareReporters.call(this, 'xunit').then(function(result) { + var expected = result.shift(); + expected.output = expected.output + .replace(/time=".+?"/g, 'time="0.5"') + .replace(/timestamp=".+?"/g, 'timestamp="some-timestamp'); + var actual = result.shift(); + actual.output = actual.output + .replace(/time=".+?"/g, 'time="0.5"') + .replace(/timestamp=".+?"/g, 'timestamp="some-timestamp'); + return expect(actual, 'to satisfy', { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code, + output: expected.output + }); + }); + }); + }); +}); diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js new file mode 100644 index 0000000000..1fe935ddc2 --- /dev/null +++ b/test/node-unit/buffered-runner.spec.js @@ -0,0 +1,553 @@ +'use strict'; + +const { + EVENT_RUN_BEGIN, + EVENT_TEST_PASS, + EVENT_TEST_FAIL, + EVENT_SUITE_END, + EVENT_SUITE_BEGIN +} = require('../../lib/runner').constants; +const rewiremock = require('rewiremock/node'); +const BUFFERED_RUNNER_PATH = require.resolve( + '../../lib/nodejs/parallel-buffered-runner.js' +); +const Suite = 
require('../../lib/suite'); +const Runner = require('../../lib/runner'); +const {createSandbox} = require('sinon'); + +describe('buffered-runner', function() { + describe('BufferedRunner', function() { + let sandbox; + let run; + let BufferedWorkerPool; + let terminate; + let BufferedRunner; + let suite; + let warn; + let cpuCount; + + beforeEach(function() { + sandbox = createSandbox(); + cpuCount = 1; + suite = new Suite('a root suite', {}, true); + warn = sandbox.stub(); + + // tests will want to further define the behavior of these. + run = sandbox.stub(); + terminate = sandbox.stub(); + BufferedWorkerPool = { + create: sandbox.stub().returns({ + run, + terminate, + stats: sandbox.stub().returns({}) + }) + }; + BufferedRunner = rewiremock.proxy(BUFFERED_RUNNER_PATH, r => ({ + '../../lib/nodejs/buffered-worker-pool': { + BufferedWorkerPool + }, + os: { + cpus: sandbox.stub().callsFake(() => new Array(cpuCount)) + }, + '../../lib/utils': r.with({warn}).callThrough() + })); + }); + + describe('constructor', function() { + it('should start in "IDLE" state', function() { + expect(new BufferedRunner(suite), 'to have property', '_state', 'IDLE'); + }); + }); + + describe('instance property', function() { + let runner; + + beforeEach(function() { + runner = new BufferedRunner(suite); + }); + + describe('_state', function() { + it('should disallow an invalid state transition', function() { + expect( + () => { + runner._state = 'BAILED'; + }, + 'to throw', + /invalid state transition/ + ); + }); + }); + }); + + describe('event', function() { + let runner; + + beforeEach(function() { + runner = new BufferedRunner(suite); + }); + + describe('EVENT_RUN_END', function() { + it('should change the state to COMPLETE', function() { + runner._state = 'RUNNING'; + runner.emit(Runner.constants.EVENT_RUN_END); + expect(runner._state, 'to be', 'COMPLETE'); + }); + }); + }); + + describe('instance method', function() { + describe('run', function() { + let runner; + + 
beforeEach(function() { + runner = new BufferedRunner(suite); + }); + + // the purpose of this is to ensure that--despite using `Promise`s + // internally--`BufferedRunner#run` does not return a `Promise`. + it('should be chainable', function(done) { + expect(runner.run(done, {files: [], options: {}}), 'to be', runner); + }); + + it('should emit `EVENT_RUN_BEGIN`', async function() { + return expect( + () => + new Promise(resolve => { + runner.run(resolve, {files: [], options: {}}); + }), + 'to emit from', + runner, + EVENT_RUN_BEGIN + ); + }); + + describe('when a worker fails', function() { + it('should recover', function(done) { + const options = {}; + run.withArgs('some-file.js', options).rejects(new Error('whoops')); + run.withArgs('some-other-file.js', options).resolves({ + failureCount: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [{args: []}]); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options + } + ); + }); + + it('should delegate to Runner#uncaught', function(done) { + const options = {}; + sandbox.spy(runner, 'uncaught'); + const err = new Error('whoops'); + run.withArgs('some-file.js', options).rejects(new Error('whoops')); + run.withArgs('some-other-file.js', options).resolves({ + failureCount: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(runner.uncaught, 'to have a call satisfying', [err]); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options + } + ); + }); + }); + + describe('when suite should bail', function() { + describe('when no event contains an error', function() { + it('should not force-terminate', function(done) { + run.resolves({ + 
failureCount: 0, + events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite', + _bail: true + } + }, + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have a call satisfying', { + args: [] + }).and('was called once'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options: {} + } + ); + }); + }); + + describe('when an event contains an error and has positive failures', function() { + describe('when subsequent files have not yet been run', function() { + it('should cleanly terminate the thread pool', function(done) { + const options = {}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', options).resolves({ + failureCount: 1, + events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite', + _bail: true + } + }, + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + } + } + ] + }); + run.withArgs('some-other-file.js', options).rejects(); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: []}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options + } + ); + }); + }); + describe('when subsequent files already started running', function() { + it('should cleanly terminate the thread pool', function(done) { + const options = {}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', options).resolves({ + failureCount: 1, + events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite', + _bail: true + } + }, + { + eventName: 
EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + } + } + ] + }); + run.withArgs('some-other-file.js', options).resolves({ + failureCount: 0, + events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite' + } + }, + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: []}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options + } + ); + }); + }); + }); + }); + + describe('when a suite has a bail flag', function() { + describe('when no event contains an error', function() { + it('should not force-terminate', function(done) { + run.resolves({ + failureCount: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have a call satisfying', { + args: [] + }).and('was called once'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options: {} + } + ); + }); + }); + describe('when an event contains an error and has positive failures', function() { + describe('when subsequent files have not yet been run', function() { + it('should cleanly terminate the thread pool', function(done) { + const options = {}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', options).resolves({ + failureCount: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some 
suite', + _bail: true + } + } + ] + }); + run.withArgs('some-other-file.js', options).rejects(); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: []}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options + } + ); + }); + }); + + describe('when subsequent files already started running', function() { + it('should cleanly terminate the thread pool', function(done) { + const options = {}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', options).resolves({ + failureCount: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + } + } + ] + }); + run.withArgs('some-other-file.js', options).resolves({ + failureCount: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: []}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options + } + ); + }); + }); + + describe('when subsequent files have not yet been run', function() { + it('should cleanly terminate the thread pool', function(done) { + const options = {}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', options).resolves({ + failureCount: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + 
} + } + ] + }); + run.withArgs('some-other-file.js', options).rejects(); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: []}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + options + } + ); + }); + }); + }); + }); + }); + }); + }); +}); diff --git a/test/node-unit/buffered-worker-pool.spec.js b/test/node-unit/buffered-worker-pool.spec.js new file mode 100644 index 0000000000..1c0b7a9083 --- /dev/null +++ b/test/node-unit/buffered-worker-pool.spec.js @@ -0,0 +1,186 @@ +'use strict'; + +const rewiremock = require('rewiremock/node'); +const {createSandbox} = require('sinon'); + +describe('class BufferedWorkerPool', function() { + let BufferedWorkerPool; + let sandbox; + let pool; + let stats; + let serializeJavascript; + let serializer; + let result; + + beforeEach(function() { + sandbox = createSandbox(); + stats = {totalWorkers: 10, busyWorkers: 8, idleWorkers: 2, pendingTasks: 3}; + result = {failures: 0, events: []}; + pool = { + terminate: sandbox.stub().resolves(), + exec: sandbox.stub().resolves(result), + stats: sandbox.stub().returns(stats) + }; + serializer = { + deserialize: sandbox.stub() + }; + + serializeJavascript = sandbox.spy(require('serialize-javascript')); + BufferedWorkerPool = rewiremock.proxy( + require.resolve('../../lib/nodejs/buffered-worker-pool'), + { + workerpool: { + pool: sandbox.stub().returns(pool), + cpus: 8 + }, + '../../lib/nodejs/serializer': serializer, + 'serialize-javascript': serializeJavascript + } + ).BufferedWorkerPool; + + // reset cache + BufferedWorkerPool.resetOptionsCache(); + }); + + afterEach(function() { + sandbox.restore(); + }); + + describe('static method', function() { + describe('create()', function() { + it('should return a BufferedWorkerPool instance', function() { + expect( + 
BufferedWorkerPool.create({foo: 'bar'}), + 'to be a', + BufferedWorkerPool + ); + }); + + describe('when passed no arguments', function() { + it('should not throw', function() { + expect(BufferedWorkerPool.create, 'not to throw'); + }); + }); + }); + + describe('serializeOptions()', function() { + describe('when passed no arguments', function() { + it('should not throw', function() { + expect(BufferedWorkerPool.serializeOptions, 'not to throw'); + }); + }); + + it('should return a serialized string', function() { + expect( + BufferedWorkerPool.serializeOptions({foo: 'bar'}), + 'to be a', + 'string' + ); + }); + + describe('when called multiple times with the same object', function() { + it('should not perform serialization twice', function() { + const obj = {foo: 'bar'}; + BufferedWorkerPool.serializeOptions(obj); + BufferedWorkerPool.serializeOptions(obj); + expect(serializeJavascript, 'was called once'); + }); + + it('should return the same value', function() { + const obj = {foo: 'bar'}; + expect( + BufferedWorkerPool.serializeOptions(obj), + 'to be', + BufferedWorkerPool.serializeOptions(obj) + ); + }); + }); + }); + }); + + describe('constructor', function() { + it('should apply defaults', function() { + expect(new BufferedWorkerPool(), 'to satisfy', { + options: { + workerType: 'process', + forkOpts: {execArgv: process.execArgv}, + maxWorkers: expect.it('to be greater than or equal to', 1) + } + }); + }); + }); + + describe('instance method', function() { + let workerPool; + + beforeEach(function() { + workerPool = BufferedWorkerPool.create(); + }); + + describe('stats()', function() { + it('should return the object returned by `workerpool.Pool#stats`', function() { + expect(workerPool.stats(), 'to be', stats); + }); + }); + + describe('run()', function() { + describe('when passed no arguments', function() { + it('should reject', async function() { + return expect(workerPool.run(), 'to be rejected with', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + 
}); + + describe('when passed a non-string filepath', function() { + it('should reject', async function() { + return expect(workerPool.run(123), 'to be rejected with', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + }); + + it('should serialize the options object', async function() { + await workerPool.run('file.js', {foo: 'bar'}); + + expect(pool.exec, 'to have a call satisfying', [ + 'run', + ['file.js', '{"foo":"bar"}'] + ]).and('was called once'); + }); + + it('should deserialize the result', async function() { + await workerPool.run('file.js', {foo: 'bar'}); + expect(serializer.deserialize, 'to have a call satisfying', [ + result + ]).and('was called once'); + }); + }); + + describe('terminate()', function() { + describe('when called with `force`', function() { + beforeEach(async function() { + await workerPool.terminate(true); + }); + + it('should delegate to the underlying pool w/ "force" behavior', async function() { + expect(pool.terminate, 'to have a call satisfying', [true]).and( + 'was called once' + ); + }); + }); + + describe('when called without `force`', function() { + beforeEach(async function() { + await workerPool.terminate(); + }); + + it('should delegate to the underlying pool w/o "force" behavior', async function() { + expect(pool.terminate, 'to have a call satisfying', [false]).and( + 'was called once' + ); + }); + }); + }); + }); +}); diff --git a/test/node-unit/mocha.spec.js b/test/node-unit/mocha.spec.js index 2b9c99a59a..fc701aadc6 100644 --- a/test/node-unit/mocha.spec.js +++ b/test/node-unit/mocha.spec.js @@ -27,19 +27,38 @@ describe('Mocha', function() { warn: sandbox.stub(), isString: sandbox.stub(), noop: sandbox.stub(), - cwd: sandbox.stub().returns(process.cwd()) + cwd: sandbox.stub().returns(process.cwd()), + isBrowser: sandbox.stub().returns(false) }; stubs.suite = Object.assign(sandbox.createStubInstance(EventEmitter), { slow: sandbox.stub(), timeout: sandbox.stub(), - bail: sandbox.stub() + bail: sandbox.stub(), + reset: 
sandbox.stub(), + dispose: sandbox.stub() }); stubs.Suite = sandbox.stub().returns(stubs.suite); stubs.Suite.constants = {}; + stubs.BufferedRunner = sandbox.stub().returns({}); + const runner = Object.assign(sandbox.createStubInstance(EventEmitter), { + run: sandbox + .stub() + .callsArgAsync(0) + .returnsThis(), + globals: sandbox.stub(), + grep: sandbox.stub(), + dispose: sandbox.stub() + }); + stubs.Runner = sandbox.stub().returns(runner); + // the Runner constructor is the main export, and constants is a static prop. + // we don't need the constants themselves, but the object cannot be undefined + stubs.Runner.constants = {}; Mocha = rewiremock.proxy(MODULE_PATH, r => ({ - '../../lib/utils': r.with(stubs.utils).callThrough(), - '../../lib/suite': stubs.Suite + '../../lib/utils.js': r.with(stubs.utils).callThrough(), + '../../lib/suite.js': stubs.Suite, + '../../lib/nodejs/parallel-buffered-runner.js': stubs.BufferedRunner, + '../../lib/runner.js': stubs.Runner })); delete require.cache[DUMB_FIXTURE_PATH]; delete require.cache[DUMBER_FIXTURE_PATH]; @@ -58,6 +77,108 @@ describe('Mocha', function() { mocha = new Mocha(opts); }); + describe('parallelMode()', function() { + describe('when `Mocha` is running in Node.js', function() { + it('should return the Mocha instance', function() { + expect(mocha.parallelMode(), 'to be', mocha); + }); + + describe('when parallel mode is already enabled', function() { + beforeEach(function() { + mocha.options.parallel = true; + mocha._runnerClass = stubs.BufferedRunner; + mocha._lazyLoadFiles = true; + }); + + it('should not swap the Runner, nor change lazy loading setting', function() { + expect(mocha.parallelMode(true), 'to satisfy', { + options: {parallel: true}, + _runnerClass: stubs.BufferedRunner, + _lazyLoadFiles: true + }); + }); + }); + + describe('when parallel mode is already disabled', function() { + beforeEach(function() { + mocha.options.parallel = false; + mocha._runnerClass = Mocha.Runner; + mocha._lazyLoadFiles 
= false; + }); + + it('should not swap the Runner, nor change lazy loading setting', function() { + expect(mocha.parallelMode(false), 'to satisfy', { + options: {parallel: false}, + _runnerClass: Mocha.Runner, + _lazyLoadFiles: false + }); + }); + }); + + describe('when `Mocha` instance in serial mode', function() { + beforeEach(function() { + mocha.options.parallel = false; + }); + + describe('when passed `true` value', function() { + describe('when `Mocha` instance is in `INIT` state', function() { + beforeEach(function() { + mocha._state = 'init'; + // this is broken + this.skip(); + }); + + it('should enable parallel mode', function() { + expect(mocha.parallelMode(true), 'to satisfy', { + _runnerClass: stubs.BufferedRunner, + options: { + parallel: true + }, + _lazyLoadFiles: true + }); + }); + }); + + describe('when `Mocha` instance is not in `INIT` state', function() { + beforeEach(function() { + mocha._state = 'disposed'; + }); + + it('should throw', function() { + expect( + function() { + mocha.parallelMode(true); + }, + 'to throw', + { + code: 'ERR_MOCHA_UNSUPPORTED' + } + ); + }); + }); + }); + + describe('when passed non-`true` value', function() { + describe('when `Mocha` instance is in `INIT` state', function() { + beforeEach(function() { + mocha._state = 'init'; + }); + + it('should enable serial mode', function() { + expect(mocha.parallelMode(0), 'to satisfy', { + _runnerClass: Mocha.Runner, + options: { + parallel: false + }, + _lazyLoadFiles: false + }); + }); + }); + }); + }); + }); + }); + describe('addFile()', function() { it('should add the given file to the files array', function() { mocha.addFile('some-file.js'); @@ -160,7 +281,7 @@ describe('Mocha', function() { expect( function() { mocha.reporter( - '../../test/node-nit/fixtures/wonky-reporter.fixture.js' + './test/node-unit/fixtures/wonky-reporter.fixture.js' ); }, 'to throw', @@ -173,7 +294,7 @@ describe('Mocha', function() { it('should warn about the error before throwing', function() { 
try { mocha.reporter( - require.resolve('./fixtures/wonky-reporter.fixture.js') + './test/node-unit/fixtures/wonky-reporter.fixture.js' ); } catch (ignored) { } finally { @@ -185,6 +306,45 @@ describe('Mocha', function() { }); }); }); + + describe('unloadFiles()', function() { + it('should reset referencesCleaned and allow for next run', function(done) { + mocha.run(function() { + mocha.unloadFiles(); + mocha.run(done); + }); + }); + + it('should not be allowed when the current instance is already disposed', function() { + mocha.dispose(); + expect( + function() { + mocha.unloadFiles(); + }, + 'to throw', + 'Mocha instance is already disposed, it cannot be used again.' + ); + }); + }); + + describe('lazyLoadFiles()', function() { + it('should return the `Mocha` instance', function() { + expect(mocha.lazyLoadFiles(), 'to be', mocha); + }); + describe('when passed a non-`true` value', function() { + it('should enable eager loading', function() { + mocha.lazyLoadFiles(0); + expect(mocha._lazyLoadFiles, 'to be false'); + }); + }); + + describe('when passed `true`', function() { + it('should enable lazy loading', function() { + mocha.lazyLoadFiles(true); + expect(mocha._lazyLoadFiles, 'to be true'); + }); + }); + }); }); describe('static method', function() { diff --git a/test/node-unit/reporters/parallel-buffered.spec.js b/test/node-unit/reporters/parallel-buffered.spec.js new file mode 100644 index 0000000000..65a74a72b8 --- /dev/null +++ b/test/node-unit/reporters/parallel-buffered.spec.js @@ -0,0 +1,230 @@ +'use strict'; + +// this reporter does not actually output anything to the terminal, so we +// need to test it differently. 
+ +const { + EVENT_SUITE_BEGIN, + EVENT_SUITE_END, + EVENT_TEST_FAIL, + EVENT_TEST_PASS, + EVENT_TEST_PENDING, + EVENT_TEST_BEGIN, + EVENT_TEST_END, + EVENT_TEST_RETRY, + EVENT_DELAY_BEGIN, + EVENT_DELAY_END, + EVENT_HOOK_BEGIN, + EVENT_HOOK_END, + EVENT_RUN_END +} = require('../../../lib/runner').constants; +const {EventEmitter} = require('events'); +const {createSandbox} = require('sinon'); +const rewiremock = require('rewiremock/node'); + +describe('ParallelBuffered', function() { + let sandbox; + let runner; + let ParallelBuffered; + + beforeEach(function() { + sandbox = createSandbox(); + runner = new EventEmitter(); + ParallelBuffered = rewiremock.proxy( + require.resolve('../../../lib/nodejs/reporters/parallel-buffered'), + { + '../../../lib/nodejs/serializer': { + SerializableEvent: { + create: (eventName, runnable, err) => ({ + eventName, + data: runnable, + error: err, + __type: 'MockSerializableEvent' + }) + }, + SerializableWorkerResult: { + create: (events, failures) => ({ + events, + failures, + __type: 'MockSerializableWorkerResult' + }) + } + }, + '../../../lib/reporters/base': class MockBase {} + } + ); + }); + + afterEach(function() { + sandbox.restore(); + }); + + describe('constructor', function() { + it('should listen for Runner events', function() { + // EventEmitter#once calls thru to EventEmitter#on, which + // befouls our assertion below. 
+ sandbox.stub(runner, 'once'); + sandbox.stub(runner, 'on'); + // eslint-disable-next-line no-new + new ParallelBuffered(runner); + expect(runner.on, 'to have calls satisfying', [ + // via Buffered + [EVENT_SUITE_BEGIN, expect.it('to be a function')], + [EVENT_SUITE_END, expect.it('to be a function')], + [EVENT_TEST_BEGIN, expect.it('to be a function')], + [EVENT_TEST_PENDING, expect.it('to be a function')], + [EVENT_TEST_FAIL, expect.it('to be a function')], + [EVENT_TEST_PASS, expect.it('to be a function')], + [EVENT_TEST_RETRY, expect.it('to be a function')], + [EVENT_TEST_END, expect.it('to be a function')], + [EVENT_HOOK_BEGIN, expect.it('to be a function')], + [EVENT_HOOK_END, expect.it('to be a function')] + ]); + }); + + it('should listen for Runner events expecting to occur once', function() { + sandbox.stub(runner, 'once'); + // eslint-disable-next-line no-new + new ParallelBuffered(runner); + expect(runner.once, 'to have calls satisfying', [ + [EVENT_DELAY_BEGIN, expect.it('to be a function')], + [EVENT_DELAY_END, expect.it('to be a function')], + [EVENT_RUN_END, expect.it('to be a function')] + ]); + }); + }); + + describe('event', function() { + let reporter; + + beforeEach(function() { + reporter = new ParallelBuffered(runner); + }); + + describe('on EVENT_RUN_END', function() { + it('should remove all listeners', function() { + runner.emit(EVENT_RUN_END); + expect(runner.eventNames(), 'to be empty'); + }); + }); + + describe('on any other event listened for', function() { + it('should populate its `events` array with SerializableEvents', function() { + const suite = { + title: 'some suite' + }; + const test = { + title: 'some test' + }; + runner.emit(EVENT_SUITE_BEGIN, suite); + runner.emit(EVENT_TEST_BEGIN, test); + runner.emit(EVENT_TEST_PASS, test); + runner.emit(EVENT_TEST_END, test); + runner.emit(EVENT_SUITE_END, suite); + expect(reporter.events, 'to equal', [ + { + eventName: EVENT_SUITE_BEGIN, + data: suite, + __type: 'MockSerializableEvent' 
+ }, + { + eventName: EVENT_TEST_BEGIN, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_PASS, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_END, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_SUITE_END, + data: suite, + __type: 'MockSerializableEvent' + } + ]); + }); + }); + }); + + describe('instance method', function() { + let reporter; + + beforeEach(function() { + reporter = new ParallelBuffered(runner); + }); + + describe('done', function() { + it('should execute its callback with a SerializableWorkerResult', function() { + const suite = { + title: 'some suite' + }; + const test = { + title: 'some test' + }; + runner.emit(EVENT_SUITE_BEGIN, suite); + runner.emit(EVENT_TEST_BEGIN, test); + runner.emit(EVENT_TEST_PASS, test); + runner.emit(EVENT_TEST_END, test); + runner.emit(EVENT_SUITE_END, suite); + const cb = sandbox.stub(); + reporter.done(0, cb); + expect(cb, 'to have a call satisfying', [ + { + events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: suite, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_BEGIN, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_PASS, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_END, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_SUITE_END, + data: suite, + __type: 'MockSerializableEvent' + } + ], + failures: 0, + __type: 'MockSerializableWorkerResult' + } + ]); + }); + + it('should reset its `events` prop', function() { + const suite = { + title: 'some suite' + }; + const test = { + title: 'some test' + }; + runner.emit(EVENT_SUITE_BEGIN, suite); + runner.emit(EVENT_TEST_BEGIN, test); + runner.emit(EVENT_TEST_PASS, test); + runner.emit(EVENT_TEST_END, test); + runner.emit(EVENT_SUITE_END, suite); + const cb = sandbox.stub(); + reporter.done(0, cb); + expect(reporter.events, 'to be empty'); + }); + }); + }); +}); diff 
--git a/test/node-unit/serializer.spec.js b/test/node-unit/serializer.spec.js new file mode 100644 index 0000000000..79f0093d11 --- /dev/null +++ b/test/node-unit/serializer.spec.js @@ -0,0 +1,555 @@ +'use strict'; + +const {createSandbox} = require('sinon'); +const { + serialize, + deserialize, + SerializableEvent, + SerializableWorkerResult +} = require('../../lib/nodejs/serializer'); + +describe('serializer', function() { + let sandbox; + + beforeEach(function() { + sandbox = createSandbox(); + }); + + afterEach(function() { + sandbox.restore(); + }); + + describe('function', function() { + describe('serialize', function() { + describe('when passed a non-object value', function() { + it('should return the value', function() { + expect(serialize('knees & toes'), 'to be', 'knees & toes'); + }); + }); + + describe('when passed an object value', function() { + describe('w/o a `serialize` method', function() { + it('should return the value', function() { + const obj = {}; + expect(serialize(obj), 'to be', obj); + }); + }); + + describe('having a `serialize` method', function() { + it('should return the result of the `serialize` method', function() { + const serializedObj = {foo: 'bar'}; + const obj = {serialize: sandbox.stub().returns(serializedObj)}; + expect(serialize(obj), 'to be', serializedObj); + }); + }); + }); + + describe('when not passed anything', function() { + it('should return `undefined`', function() { + expect(serialize(), 'to be undefined'); + }); + }); + }); + + describe('deserialize', function() { + describe('when passed nothing', function() { + it('should return `undefined`', function() { + expect(deserialize(), 'to be undefined'); + }); + }); + + describe('when passed a non-object value', function() { + it('should return the value', function() { + expect(deserialize(500), 'to be', 500); + }); + }); + + describe('when passed an object value which is not a SerializedWorkerResult', function() { + it('should return the value', function() { + const 
obj = {}; + expect(deserialize(obj), 'to be', obj); + }); + }); + + describe('when passed a SerializedWorkerResult object', function() { + // note that SerializedWorkerResult is an interface (typedef), not a class. + + it('should return the result of `SerializableWorkerResult.deserialize` called on the value', function() { + const obj = Object.assign({}, SerializableWorkerResult.create()); + sandbox + .stub(SerializableWorkerResult, 'deserialize') + .returns('butts'); + deserialize(obj); + expect( + SerializableWorkerResult.deserialize, + 'to have a call satisfying', + { + args: [obj], + returned: 'butts' + } + ); + }); + }); + }); + }); + + describe('SerializableEvent', function() { + describe('constructor', function() { + describe('when called without `eventName`', function() { + it('should throw', function() { + expect( + () => new SerializableEvent(), + 'to throw', + /expected a non-empty `eventName`/ + ); + }); + }); + + describe('when called with a non-object `rawObject`', function() { + it('should throw', function() { + expect( + () => new SerializableEvent('blub', 'glug'), + 'to throw', + /expected object, received \[string\]/ + ); + }); + }); + }); + + describe('instance method', function() { + describe('serialize', function() { + it('should mutate the instance in-place', function() { + const evt = SerializableEvent.create('foo'); + expect(evt.serialize(), 'to be', evt); + }); + + it('should freeze the instance', function() { + expect( + Object.isFrozen(SerializableEvent.create('foo').serialize()), + 'to be true' + ); + }); + + describe('when passed an object with a `serialize` method', function() { + it('should call the `serialize` method', function() { + const obj = { + serialize: sandbox.stub() + }; + SerializableEvent.create('some-event', obj).serialize(); + expect(obj.serialize, 'was called once'); + }); + }); + + describe('when passed an object containing an object with a `serialize` method', function() { + it('should call the `serialize` method', 
function() { + const stub = sandbox.stub(); + const obj = { + nested: { + serialize: stub + } + }; + SerializableEvent.create('some-event', obj).serialize(); + expect(stub, 'was called once'); + }); + }); + + describe('when passed an object containing a non-`serialize` method', function() { + it('should remove the method', function() { + const obj = { + func: () => {} + }; + + expect( + SerializableEvent.create('some-event', obj).serialize(), + 'to satisfy', + { + data: expect.it('not to have property', 'func') + } + ); + }); + }); + + describe('when passed an object containing an array', function() { + it('should serialize the array', function() { + const obj = { + list: [{herp: 'derp'}, {bing: 'bong'}] + }; + expect( + SerializableEvent.create('some-event', obj).serialize(), + 'to satisfy', + {data: {list: [{herp: 'derp'}, {bing: 'bong'}]}} + ); + }); + }); + + describe('when passed an error', function() { + it('should serialize the error', function() { + const obj = {}; + const err = new Error('monkeypants'); + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + + it('should retain own props', function() { + const obj = {}; + const err = new Error('monkeypants'); + err.code = 'MONKEY'; + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + code: 'MONKEY', + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + + it('should not retain not-own props', function() { + const obj = {}; + const err = new Error('monkeypants'); + // eslint-disable-next-line no-proto + err.__proto__.code = 'MONKEY'; + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + message: 'monkeypants', + 
stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + }); + + describe('when passed an object containing a top-level prop with an Error value', function() { + it('should serialize the Error', function() { + const obj = { + monkeyError: new Error('pantsmonkey') + }; + const evt = SerializableEvent.create('some-event', obj); + expect(evt.serialize(), 'to satisfy', { + eventName: 'some-event', + data: { + monkeyError: { + message: 'pantsmonkey', + stack: /^Error: pantsmonkey/, + __type: 'Error' + } + } + }); + }); + }); + describe('when passed an object containing a nested prop with an Error value', function() { + it('should serialize the Error', function() { + const obj = { + nestedObj: { + monkeyError: new Error('pantsmonkey') + } + }; + const evt = SerializableEvent.create('some-event', obj); + expect(evt.serialize(), 'to satisfy', { + eventName: 'some-event', + data: { + nestedObj: { + monkeyError: { + message: 'pantsmonkey', + stack: /^Error: pantsmonkey/, + __type: 'Error' + } + } + } + }); + }); + }); + }); + }); + + describe('static method', function() { + describe('deserialize', function() { + describe('when passed a falsy parameter', function() { + it('should throw "invalid arg type" error', function() { + expect(SerializableEvent.deserialize, 'to throw', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + }); + + it('should return a new object w/ null prototype', function() { + const obj = {bob: 'bob'}; + expect(SerializableEvent.deserialize(obj), 'to satisfy', obj) + .and('not to equal', obj) + .and('not to have property', 'constructor'); + }); + + describe('when passed value contains `data` prop', function() { + it('should ignore __proto__', function() { + const obj = { + data: Object.create(null) + }; + // eslint-disable-next-line no-proto + obj.data.__proto__ = {peaches: 'prunes'}; + + const expected = Object.assign(Object.create(null), { + data: Object.create(null) + }); + expect(SerializableEvent.deserialize(obj), 'to 
equal', expected); + }); + + describe('when `data` prop contains a nested serialized Error prop', function() { + it('should create an Error instance from the nested serialized Error prop', function() { + const message = 'problems!'; + const stack = 'problem instructions'; + const code = 'EIEIO'; + const expected = Object.assign(Object.create(null), { + data: { + whoops: Object.assign(new Error(message), { + stack, + code + }) + } + }); + + expect( + SerializableEvent.deserialize({ + data: { + whoops: { + message, + stack, + code, + __type: 'Error' + } + } + }), + 'to equal', + expected + ); + }); + }); + }); + + describe('when passed value contains an `error` prop', function() { + it('should create an Error instance from the prop', function() { + const message = 'problems!'; + const stack = 'problem instructions'; + const code = 'EIEIO'; + const expected = Object.assign(Object.create(null), { + error: Object.assign(new Error(message), { + stack, + code + }) + }); + + expect( + SerializableEvent.deserialize({ + error: { + message, + stack, + code, + __type: 'Error' + } + }), + 'to equal', + expected + ); + }); + }); + + describe('when passed value data contains a prop beginning with "$$"', function() { + let result; + + beforeEach(function() { + result = SerializableEvent.deserialize({data: {$$foo: 'bar'}}); + }); + it('should create a new prop having a function value', function() { + expect(result, 'to satisfy', { + data: { + foo: expect.it('to be a function') + } + }); + }); + + it('should create a new prop returning the original value', function() { + expect(result.data.foo(), 'to equal', 'bar'); + }); + + it('should remove the prop with the "$$" prefix', function() { + expect(result.data, 'not to have property', '$$foo'); + }); + }); + + describe('when the value data contains a prop with an array value', function() { + beforeEach(function() { + sandbox.spy(SerializableEvent, '_deserializeObject'); + }); + + it('should deserialize each prop', function() { + const obj 
= {data: {foo: [{bar: 'baz'}]}}; + SerializableEvent.deserialize(obj); + expect( + SerializableEvent._deserializeObject, + 'to have a call satisfying', + { + args: [obj.data.foo, 0] + } + ); + }); + }); + }); + + describe('create', function() { + it('should instantiate a SerializableEvent', function() { + expect( + SerializableEvent.create('some-event'), + 'to be a', + SerializableEvent + ); + }); + }); + }); + }); + + describe('SerializableWorkerResult', function() { + describe('static method', function() { + describe('create', function() { + it('should return a new SerializableWorkerResult instance', function() { + expect( + SerializableWorkerResult.create(), + 'to be a', + SerializableWorkerResult + ); + }); + }); + + describe('isSerializedWorkerResult', function() { + describe('when passed an instance', function() { + it('should return `true`', function() { + expect( + SerializableWorkerResult.isSerializedWorkerResult( + new SerializableWorkerResult() + ), + 'to be true' + ); + }); + }); + + describe('when passed an object with an appropriate `__type` prop', function() { + it('should return `true`', function() { + // this is the most likely use-case, as the object is transmitted over IPC + // and loses its prototype + const original = new SerializableWorkerResult(); + const clone = Object.assign({}, original); + expect( + SerializableWorkerResult.isSerializedWorkerResult(clone), + 'to be true' + ); + }); + }); + + describe('when passed an object without an appropriate `__type` prop', function() { + it('should return `false`', function() { + expect( + SerializableWorkerResult.isSerializedWorkerResult({ + mister: 'mister' + }), + 'to be false' + ); + }); + }); + }); + + describe('deserialize', function() { + beforeEach(function() { + sandbox.stub(SerializableEvent, 'deserialize'); + }); + + it('should call SerializableEvent#deserialize on each item in its `events` prop', function() { + const result = Object.assign( + {}, + SerializableWorkerResult.create([ + 
{eventName: 'foo'}, + {eventName: 'bar'} + ]) + ); + SerializableWorkerResult.deserialize(result); + expect(SerializableEvent.deserialize, 'to have calls satisfying', [ + {args: [{eventName: 'foo'}]}, + {args: [{eventName: 'bar'}]} + ]); + }); + + it('should return the deserialized value', function() { + const result = Object.assign( + {}, + SerializableWorkerResult.create([ + {eventName: 'foo'}, + {eventName: 'bar'} + ]) + ); + expect( + SerializableWorkerResult.deserialize(result), + 'to equal', + result + ); + }); + }); + }); + + describe('instance method', function() { + describe('serialize', function() { + it('should return a read-only value', function() { + expect( + Object.isFrozen(SerializableWorkerResult.create().serialize()), + 'to be true' + ); + }); + + it('should call `SerializableEvent#serialize` of each of its events', function() { + sandbox.spy(SerializableEvent.prototype, 'serialize'); + const events = [ + SerializableEvent.create('foo'), + SerializableEvent.create('bar') + ]; + SerializableWorkerResult.create(events).serialize(); + expect( + SerializableEvent.prototype.serialize, + 'to have calls satisfying', + [{thisValue: events[0]}, {thisValue: events[1]}] + ); + }); + }); + }); + describe('constructor', function() { + // the following two tests should be combined into one, but not sure how to express + // as a single assertion + + it('should add a readonly `__type` prop', function() { + expect( + new SerializableWorkerResult(), + 'to have readonly property', + '__type' + ); + }); + }); + }); +}); diff --git a/test/node-unit/worker.spec.js b/test/node-unit/worker.spec.js new file mode 100644 index 0000000000..78dfa1ac86 --- /dev/null +++ b/test/node-unit/worker.spec.js @@ -0,0 +1,223 @@ +'use strict'; + +const serializeJavascript = require('serialize-javascript'); +const rewiremock = require('rewiremock/node'); +const {SerializableWorkerResult} = require('../../lib/nodejs/serializer'); +const {createSandbox} = require('sinon'); + +const 
WORKER_PATH = require.resolve('../../lib/nodejs/worker.js'); + +describe('worker', function() { + let worker; + let sandbox; + let stubs; + + beforeEach(function() { + sandbox = createSandbox(); + stubs = { + workerpool: { + isMainThread: false, + worker: sandbox.stub() + } + }; + sandbox.spy(process, 'removeAllListeners'); + }); + + describe('when run as main process', function() { + it('should throw', function() { + expect(() => { + rewiremock.proxy(WORKER_PATH, { + workerpool: { + isMainThread: true, + worker: stubs.workerpool.worker + } + }); + }, 'to throw'); + }); + }); + + describe('when run as worker process', function() { + let mocha; + + beforeEach(function() { + mocha = { + addFile: sandbox.stub().returnsThis(), + loadFilesAsync: sandbox.stub().resolves(), + run: sandbox.stub().callsArgAsync(0), + unloadFiles: sandbox.stub().returnsThis() + }; + stubs.Mocha = Object.assign(sandbox.stub().returns(mocha), { + bdd: sandbox.stub(), + interfaces: {} + }); + + stubs.serializer = { + serialize: sandbox.stub() + }; + + stubs.runHelpers = { + handleRequires: sandbox.stub(), + validatePlugin: sandbox.stub(), + loadRootHooks: sandbox.stub().resolves() + }; + + worker = rewiremock.proxy(WORKER_PATH, { + workerpool: stubs.workerpool, + '../../lib/mocha': stubs.Mocha, + '../../lib/nodejs/serializer': stubs.serializer, + '../../lib/cli/run-helpers': stubs.runHelpers + }); + }); + + it('should register itself with workerpool', function() { + expect(stubs.workerpool.worker, 'to have a call satisfying', [ + {run: worker.run} + ]); + }); + + describe('function', function() { + describe('run()', function() { + describe('when called without arguments', function() { + it('should reject', async function() { + return expect(worker.run, 'to be rejected with error satisfying', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + }); + + describe('when passed a non-string `options` value', function() { + it('should reject', async function() { + return expect( + () => 
worker.run('foo.js', 42), + 'to be rejected with error satisfying', + { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + } + ); + }); + }); + + describe('when passed an invalid string `options` value', function() { + it('should reject', async function() { + return expect( + () => worker.run('foo.js', 'tomfoolery'), + 'to be rejected with error satisfying', + { + code: 'ERR_MOCHA_INVALID_ARG_VALUE' + } + ); + }); + }); + + describe('when called with empty "filepath" argument', function() { + it('should reject', async function() { + return expect( + () => worker.run(''), + 'to be rejected with error satisfying', + { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + } + ); + }); + }); + + describe('when the file at "filepath" argument is unloadable', function() { + it('should reject', async function() { + mocha.loadFilesAsync.rejects(); + return expect( + () => worker.run('some-non-existent-file.js'), + 'to be rejected' + ); + }); + }); + + describe('when the file at "filepath" is loadable', function() { + let result; + beforeEach(function() { + result = SerializableWorkerResult.create(); + + mocha.loadFilesAsync.resolves(); + mocha.run.yields(result); + }); + + it('should handle "--require"', async function() { + await worker.run( + 'some-file.js', + serializeJavascript({require: 'foo'}) + ); + expect( + stubs.runHelpers.handleRequires, + 'to have a call satisfying', + ['foo'] + ).and('was called once'); + }); + + it('should handle "--ui"', async function() { + const argv = {foo: 'bar'}; + await worker.run('some-file.js', serializeJavascript(argv)); + + expect( + stubs.runHelpers.validatePlugin, + 'to have a call satisfying', + [argv, 'ui', stubs.Mocha.interfaces] + ).and('was called once'); + }); + + it('should call Mocha#run', async function() { + await worker.run('some-file.js'); + expect(mocha.run, 'was called once'); + }); + + it('should remove all uncaughtException listeners', async function() { + await worker.run('some-file.js'); + expect(process.removeAllListeners, 'to have a 
call satisfying', [ + 'uncaughtException' + ]); + }); + + describe('when serialization succeeds', function() { + beforeEach(function() { + stubs.serializer.serialize.returnsArg(0); + }); + + it('should resolve with a SerializedWorkerResult', async function() { + return expect( + worker.run('some-file.js'), + 'to be fulfilled with', + result + ); + }); + }); + + describe('when serialization fails', function() { + beforeEach(function() { + stubs.serializer.serialize.throws(); + }); + + it('should reject', async function() { + return expect(worker.run('some-file.js'), 'to be rejected'); + }); + }); + + describe('when run twice', function() { + it('should initialize only once', async function() { + await worker.run('some-file.js'); + await worker.run('some-other-file.js'); + + expect(stubs.runHelpers, 'to satisfy', { + handleRequires: expect.it('was called once'), + validatePlugin: expect.it('was called once') + }); + }); + }); + }); + }); + }); + }); + + afterEach(function() { + sandbox.restore(); + // this is needed due to `require.cache` getting dumped in watch mode + process.removeAllListeners('beforeExit'); + }); +}); diff --git a/test/reporters/tap.spec.js b/test/reporters/tap.spec.js index f3bfe8d473..01294e0430 100644 --- a/test/reporters/tap.spec.js +++ b/test/reporters/tap.spec.js @@ -40,29 +40,17 @@ describe('TAP reporter', function() { describe('event handlers', function() { describe("on 'start' event", function() { var expectedSuite = 'some suite'; - var expectedTotal = 10; - var expectedString; var stdout = []; before(function() { var runner = createMockRunner('start', EVENT_RUN_BEGIN); runner.suite = expectedSuite; - runner.grepTotal = function(string) { - expectedString = string; - return expectedTotal; - }; stdout = runReporter({}, runner, options); }); it('should not write a TAP specification version', function() { expect(stdout, 'not to contain', 'TAP version'); }); - - it('should write the number of tests that it plans to run', function() { - var 
expectedArray = ['1..' + expectedTotal + '\n']; - expect(stdout, 'to equal', expectedArray); - expect(expectedString, 'to be', expectedSuite); - }); }); describe("on 'pending' event", function() { @@ -78,7 +66,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -102,7 +89,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -141,7 +127,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -171,7 +156,6 @@ describe('TAP reporter', function() { error ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -209,7 +193,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -245,7 +228,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -271,11 +253,10 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); - it('should write total tests, passes, and failures', function() { + it('should write total tests, passes, failures, & plan', function() { var numberOfPasses = 1; var numberOfFails = 1; var totalTests = numberOfPasses + numberOfFails; @@ -284,7 +265,8 @@ describe('TAP reporter', function() { 'not ok ' + numberOfFails + ' ' + expectedTitle + '\n', '# tests ' + totalTests + '\n', '# pass ' + numberOfPasses + '\n', - '# fail ' + numberOfFails + '\n' + '# fail ' + numberOfFails + '\n', + '1..' 
+ totalTests + '\n' ]; expect(stdout, 'to equal', expectedArray); }); @@ -302,17 +284,11 @@ describe('TAP reporter', function() { describe('event handlers', function() { describe("on 'start' event", function() { var expectedSuite = 'some suite'; - var expectedTotal = 10; - var expectedString; var stdout; before(function() { var runner = createMockRunner('start', EVENT_RUN_BEGIN); runner.suite = expectedSuite; - runner.grepTotal = function(string) { - expectedString = string; - return expectedTotal; - }; stdout = runReporter({}, runner, options); }); @@ -321,12 +297,6 @@ describe('TAP reporter', function() { var expectedFirstLine = 'TAP version ' + tapVersion + '\n'; expect(stdout[0], 'to equal', expectedFirstLine); }); - - it('should write the number of tests that it plans to run', function() { - var expectedSecondLine = '1..' + expectedTotal + '\n'; - expect(stdout[1], 'to equal', expectedSecondLine); - expect(expectedString, 'to be', expectedSuite); - }); }); describe("on 'pending' event", function() { @@ -342,7 +312,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -366,7 +335,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -405,7 +373,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -438,7 +405,6 @@ describe('TAP reporter', function() { error ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -479,7 +445,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -519,7 +484,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -545,11 +509,10 @@ 
describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); - it('should write total tests, passes, and failures', function() { + it('should write total tests, passes, failures & plan', function() { var numberOfPasses = 1; var numberOfFails = 1; var totalTests = numberOfPasses + numberOfFails; @@ -558,7 +521,8 @@ describe('TAP reporter', function() { 'not ok ' + numberOfFails + ' ' + expectedTitle + '\n', '# tests ' + totalTests + '\n', '# pass ' + numberOfPasses + '\n', - '# fail ' + numberOfFails + '\n' + '# fail ' + numberOfFails + '\n', + '1..' + totalTests + '\n' ]; expect(stdout, 'to equal', expectedArray); }); diff --git a/test/unit/errors.spec.js b/test/unit/errors.spec.js index 51d066ef98..3c1adee505 100644 --- a/test/unit/errors.spec.js +++ b/test/unit/errors.spec.js @@ -3,26 +3,63 @@ var errors = require('../../lib/errors'); describe('Errors', function() { - var expectedMessage = 'some message'; - it('should include expected code in thrown reporter errors', function() { - var throwError = function() { - throw errors.createInvalidReporterError(expectedMessage, 'badReporter'); - }; - expect(throwError, 'to throw', { - message: expectedMessage, - code: 'ERR_MOCHA_INVALID_REPORTER', - reporter: 'badReporter' + var message = 'some message'; + + describe('createInvalidReporterError()', function() { + it('should include expected code in thrown reporter errors', function() { + expect( + errors.createInvalidReporterError(message, 'badReporter'), + 'to satisfy', + { + message: message, + code: 'ERR_MOCHA_INVALID_REPORTER', + reporter: 'badReporter' + } + ); + }); + }); + + describe('createInvalidInterfaceError()', function() { + it('should include expected code in thrown interface errors', function() { + expect( + errors.createInvalidInterfaceError(message, 'badUi'), + 'to satisfy', + { + message: message, + code: 'ERR_MOCHA_INVALID_INTERFACE', + interface: 'badUi' + } + ); }); }); - 
it('should include expected code in thrown interface errors', function() { - var throwError = function() { - throw errors.createInvalidInterfaceError(expectedMessage, 'badUi'); - }; - expect(throwError, 'to throw', { - message: expectedMessage, - code: 'ERR_MOCHA_INVALID_INTERFACE', - interface: 'badUi' + describe('createForbiddenExclusivityError()', function() { + describe('when Mocha instance is running in a worker process', function() { + it('should output a message regarding incompatibility', function() { + var mocha = {isWorker: true}; + expect( + errors.createForbiddenExclusivityError(mocha, {}), + 'to satisfy', + { + message: /parallel/, + code: errors.constants.FORBIDDEN_EXCLUSIVITY + } + ); + }); + }); + + describe('when Mocha instance is not running in a worker process', function() { + it('should output a message regarding --forbid-only', function() { + var mocha = {}; + expect( + errors.createForbiddenExclusivityError(mocha, {}), + 'to satisfy', + { + message: /--forbid-only/, + code: errors.constants.FORBIDDEN_EXCLUSIVITY + } + ); + }); }); }); }); diff --git a/test/unit/hook-timeout.spec.js b/test/unit/hook-timeout.spec.js index 8c1b1f4735..12d9bdc1a3 100644 --- a/test/unit/hook-timeout.spec.js +++ b/test/unit/hook-timeout.spec.js @@ -1,9 +1,11 @@ 'use strict'; -before(function(done) { - setTimeout(done, 100); -}); +describe('hook timeout', function() { + before(function(done) { + setTimeout(done, 100); + }); -it('should work', function(done) { - done(); + it('should work', function(done) { + done(); + }); }); diff --git a/test/unit/mocha.spec.js b/test/unit/mocha.spec.js index 5fb0263ec1..64cb7e9b07 100644 --- a/test/unit/mocha.spec.js +++ b/test/unit/mocha.spec.js @@ -51,7 +51,13 @@ describe('Mocha', function() { sandbox = sinon.createSandbox(); reporterInstance = {}; opts = {reporter: sandbox.stub().returns(reporterInstance)}; - Base = sandbox.stub().returns({}); + + // NOTE: calling `stub(someObject, someFunction)` where `someFunction` has + // its 
own static properties WILL NOT blast those static properties! + Base = sandbox.stub(Mocha.reporters, 'Base').returns({}); + sandbox.stub(Mocha.reporters, 'base').returns({}); + sandbox.stub(Mocha.reporters, 'spec').returns({}); + runner = utils.assign(sandbox.createStubInstance(EventEmitter), { run: sandbox .stub() @@ -61,7 +67,7 @@ describe('Mocha', function() { grep: sandbox.stub(), dispose: sandbox.stub() }); - Runner = sandbox.stub().returns(runner); + Runner = sandbox.stub(Mocha, 'Runner').returns(runner); // the Runner constructor is the main export, and constants is a static prop. // we don't need the constants themselves, but the object cannot be undefined Runner.constants = {}; @@ -72,18 +78,13 @@ describe('Mocha', function() { dispose: sandbox.stub(), reset: sandbox.stub() }); - Suite = sandbox.stub().returns(suite); + Suite = sandbox.stub(Mocha, 'Suite').returns(suite); Suite.constants = {}; sandbox.stub(utils, 'supportsEsModules').returns(false); sandbox.stub(utils, 'warn'); sandbox.stub(utils, 'isString'); sandbox.stub(utils, 'noop'); - - Mocha.Runner = Runner; - Mocha.reporters.Base = Mocha.reporters.base = Base; - sandbox.stub(Mocha.reporters, 'spec'); - Mocha.Suite = Suite; }); afterEach(function() { @@ -101,6 +102,7 @@ describe('Mocha', function() { sandbox.stub(Mocha.prototype, 'global').returnsThis(); sandbox.stub(Mocha.prototype, 'retries').returnsThis(); sandbox.stub(Mocha.prototype, 'rootHooks').returnsThis(); + sandbox.stub(Mocha.prototype, 'parallelMode').returnsThis(); }); it('should set _cleanReferencesAfterRun to true', function() { @@ -162,6 +164,26 @@ describe('Mocha', function() { ]).and('was called once'); }); }); + + describe('when `parallel` option is true', function() { + describe('and `jobs` option > 1', function() { + it('should enable parallel mode', function() { + // eslint-disable-next-line no-new + new Mocha({parallel: true, jobs: 2}); + expect(Mocha.prototype.parallelMode, 'to have a call satisfying', [ + true + ]).and('was 
called once'); + }); + }); + + describe('and `jobs` option <= 1', function() { + it('should not enable parallel mode', function() { + // eslint-disable-next-line no-new + new Mocha({parallel: true, jobs: 1}); + expect(Mocha.prototype.parallelMode, 'was not called'); + }); + }); + }); }); describe('instance method', function() { @@ -546,7 +568,7 @@ describe('Mocha', function() { describe('when Mocha is set to lazily load files', function() { beforeEach(function() { - mocha.loadAsync = true; + mocha.lazyLoadFiles(true); }); it('should not eagerly load files', function(done) { @@ -635,7 +657,7 @@ describe('Mocha', function() { it('should configure the Base reporter', function(done) { mocha.run(function() { - expect(Base, 'to exhaustively satisfy', { + expect(Base, 'to satisfy', { inlineDiffs: 'some value', hideDiff: true, useColors: 'truthy' @@ -647,7 +669,7 @@ describe('Mocha', function() { it('should configure the Base reporter', function(done) { mocha.run(function() { - expect(Base, 'to exhaustively satisfy', { + expect(Base, 'to satisfy', { inlineDiffs: 'some value', hideDiff: true }); @@ -695,8 +717,6 @@ describe('Mocha', function() { }, 'to throw', { - message: - 'Mocha instance is currently running tests, cannot start a next test run until this one is done', code: 'ERR_MOCHA_INSTANCE_ALREADY_RUNNING', instance: mocha } @@ -726,8 +746,6 @@ describe('Mocha', function() { }, 'to throw', { - message: - 'Mocha instance is already disposed, cannot start a new test run. Please create a new mocha instance. Be sure to set disable `cleanReferencesAfterRun` when you want to reuse the same mocha instance for multiple test runs.', code: 'ERR_MOCHA_INSTANCE_ALREADY_DISPOSED', cleanReferencesAfterRun: true, instance: mocha @@ -760,8 +778,6 @@ describe('Mocha', function() { }, 'to throw', { - message: - 'Mocha instance is already disposed, cannot start a new test run. Please create a new mocha instance. 
Be sure to set disable `cleanReferencesAfterRun` when you want to reuse the same mocha instance for multiple test runs.', code: 'ERR_MOCHA_INSTANCE_ALREADY_DISPOSED', instance: mocha } @@ -818,23 +834,21 @@ describe('Mocha', function() { }); }); - describe('unloadFiles()', function() { - it('should reset referencesCleaned and allow for next run', function(done) { - mocha.run(function() { - mocha.unloadFiles(); - mocha.run(done); + describe('parallelMode()', function() { + describe('when `Mocha` is running in a browser', function() { + beforeEach(function() { + sandbox.stub(utils, 'isBrowser').returns(true); }); - }); - it('should not be allowed when the current instance is already disposed', function() { - mocha.dispose(); - expect( - function() { - mocha.unloadFiles(); - }, - 'to throw', - 'Mocha instance is already disposed, it cannot be used again.' - ); + it('should throw', function() { + expect( + function() { + mocha.parallelMode(); + }, + 'to throw', + {code: 'ERR_MOCHA_UNSUPPORTED'} + ); + }); }); }); });