From ef40c9ee853250c97d29b14d9cf0dae5b4c03ad5 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 2 Mar 2020 17:02:30 -0800 Subject: [PATCH 01/92] WIP: concurrency based on worker threads; see #2839 [ci skip] --- .eslintrc.yml | 25 ++++---- lib/buffered-runner.js | 75 ++++++++++++++++++++++ lib/cli/run-helpers.js | 22 +++++++ lib/cli/run-option-metadata.js | 5 +- lib/cli/run.js | 13 ++++ lib/mocha.js | 11 +++- lib/reporters/buffered.js | 114 +++++++++++++++++++++++++++++++++ lib/worker.js | 39 +++++++++++ package.json | 1 + 9 files changed, 290 insertions(+), 15 deletions(-) create mode 100644 lib/buffered-runner.js create mode 100644 lib/reporters/buffered.js create mode 100644 lib/worker.js diff --git a/.eslintrc.yml b/.eslintrc.yml index 34c77cabe4..883d122818 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -16,17 +16,20 @@ rules: - safe overrides: - files: - - scripts/**/*.js - - package-scripts.js - - karma.conf.js - - .wallaby.js - - .eleventy.js - - bin/* - - lib/cli/**/*.js - - test/node-unit/**/*.js - - test/integration/options/watch.spec.js - - test/integration/helpers.js - - lib/growl.js + - 'scripts/**/*.js' + - 'package-scripts.js' + - 'karma.conf.js' + - '.wallaby.js' + - '.eleventy.js' + - 'bin/*' + - 'lib/cli/**/*.js' + - 'test/node-unit/**/*.js' + - 'test/integration/options/watch.spec.js' + - 'test/integration/helpers.js' + - 'lib/growl.js' + - 'lib/buffered-runner.js' + - 'lib/worker.js' + - 'lib/reporters/buffered.js' parserOptions: ecmaVersion: 2018 env: diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js new file mode 100644 index 0000000000..7a01b90676 --- /dev/null +++ b/lib/buffered-runner.js @@ -0,0 +1,75 @@ +'use strict'; + +const Runner = require('./runner'); +const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; +const {spawn, Pool, Worker} = require('threads'); +const debug = require('debug')('mocha:buffered-runner'); + +/** + * This `Runner` delegates tests runs to worker threads. Does not execute any + * {@link Runnable}s by itself! + */ +class BufferedRunner extends Runner { + /** + * Runs Mocha tests by creating a thread pool, then delegating work to the + * worker threads. Each worker receives one file, and as workers become + * available, they take a file from the queue and run it. + * The worker thread execution is treated like an RPC--it returns a `Promise` + * containing serialized information about the run. The information is processed + * as it's received, and emitted to a {@link Reporter}, which is likely listening + * for these events. + * + * @todo handle tests in a specific order, e.g., via `--file`? + * @todo handle delayed runs? + * @todo graceful failure + * @todo audit `BufferedEvent` objects; e.g. do tests need a `parent` prop? + * @todo should we just instantiate a `Test` object from the `BufferedEvent`? + * @param {Function} callback - Called with an exit code corresponding to + * number of test failures. 
+ * @param {Object} options + * @param {string[]} options.files - List of test files + * @param {Options} option.opts - Command-line options + * @returns {Promise} + */ + async run(callback, {files, opts}) { + const pool = Pool(() => spawn(new Worker('./worker.js')), opts.jobs); + + let exitCode = 0; + + this.emit(EVENT_RUN_BEGIN); + + files.forEach(file => { + debug('enqueueing test file %s', file); + pool.queue(async run => { + const [failures, events] = await run(file, opts); + debug( + 'completed run of file %s; %d failures / %d events', + file, + failures, + events.length + ); + exitCode += failures; // can this be non-numeric? + events.forEach(({name, data}) => { + Object.keys(data).forEach(key => { + if (key.startsWith('__')) { + data[key.slice(2)] = () => data[key]; + } + }); + // maybe we should just expect `err` separately from the worker. + if (data.err) { + this.emit(name, data, data.err); + } else { + this.emit(name, data); + } + }); + }); + }); + + await pool.settled(); // nonzero exit code if rejection? + await pool.terminate(); + this.emit(EVENT_RUN_END); + callback(exitCode); + } +} + +module.exports = BufferedRunner; diff --git a/lib/cli/run-helpers.js b/lib/cli/run-helpers.js index 72823c48f6..25000e8f9b 100644 --- a/lib/cli/run-helpers.js +++ b/lib/cli/run-helpers.js @@ -108,6 +108,25 @@ const singleRun = async (mocha, {exit}, fileCollectParams) => { return mocha.run(exit ? exitMocha : exitMochaLater); }; +/** + * Collect files and run tests (using `BufferedRunner`) + * @param {Mocha} mocha - Mocha instance + * @param {Options} opts - Command line options + * @param {Object} fileCollectParams - Parameters that control test + * file collection. See `lib/cli/collect-files.js`. + * @returns {Promise} + * @private + */ +const parallelRun = async (mocha, opts, fileCollectParams) => { + const files = collectFiles(fileCollectParams); + const {jobs} = opts; + debug( + `executing ${files.length} test file(s) across ${jobs} concurrent jobs` + ); + + return mocha.run(opts.exit ? 
exitMocha : exitMochaLater, {files, opts}); +}; + /** * Actually run tests * @param {Mocha} mocha - Mocha instance @@ -122,6 +141,7 @@ exports.runMocha = async (mocha, options) => { exit = false, ignore = [], file = [], + parallel = false, recursive = false, sort = false, spec = [], @@ -140,6 +160,8 @@ exports.runMocha = async (mocha, options) => { if (watch) { watchRun(mocha, {watchFiles, watchIgnore}, fileCollectParams); + } else if (parallel) { + await parallelRun(mocha, options, fileCollectParams); } else { await singleRun(mocha, {exit}, fileCollectParams); } diff --git a/lib/cli/run-option-metadata.js b/lib/cli/run-option-metadata.js index 4648d9fbfe..da3b7d995d 100644 --- a/lib/cli/run-option-metadata.js +++ b/lib/cli/run-option-metadata.js @@ -42,11 +42,12 @@ exports.types = { 'list-interfaces', 'list-reporters', 'no-colors', + 'parallel', 'recursive', 'sort', 'watch' ], - number: ['retries'], + number: ['retries', 'jobs'], string: [ 'config', 'fgrep', @@ -75,7 +76,9 @@ exports.aliases = { growl: ['G'], ignore: ['exclude'], invert: ['i'], + jobs: ['j'], 'no-colors': ['C'], + parallel: ['p'], reporter: ['R'], 'reporter-option': ['reporter-options', 'O'], require: ['r'], diff --git a/lib/cli/run.js b/lib/cli/run.js index d024cbb0f2..1c52e7f634 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -24,6 +24,7 @@ const {ONE_AND_DONES, ONE_AND_DONE_ARGS} = require('./one-and-dones'); const debug = require('debug')('mocha:cli:run'); const defaults = require('../mocharc'); const {types, aliases} = require('./run-option-metadata'); +const coreCount = require('os').cpus().length; /** * Logical option groups @@ -150,6 +151,14 @@ exports.builder = yargs => description: 'Inverts --grep and --fgrep matches', group: GROUPS.FILTERS }, + jobs: { + description: 'Number of concurrent jobs', + implies: 'parallel', + defaultDescription: `CPU core count (${coreCount})`, + requiresArg: true, + group: GROUPS.RULES, + coerce: value => (typeof value === 'undefined' ? coreCount : value) + }, 'list-interfaces': { conflicts: Array.from(ONE_AND_DONE_ARGS), description: 'List built-in user interfaces & exit' @@ -169,6 +178,10 @@ exports.builder = yargs => normalize: true, requiresArg: true }, + parallel: { + description: 'Run tests in parallel', + group: GROUPS.RULES + }, recursive: { description: 'Look for tests in subdirectories', group: GROUPS.FILES diff --git a/lib/mocha.js b/lib/mocha.js index 017daa1e2c..e142ab1229 100644 --- a/lib/mocha.js +++ b/lib/mocha.js @@ -90,6 +90,7 @@ exports.Test = require('./test'); * @param {number} [options.slow] - Slow threshold value. * @param {number|string} [options.timeout] - Timeout threshold value. * @param {string} [options.ui] - Interface name. + * @param {boolean} [options.parallel] - Run jobs in parallel */ function Mocha(options) { options = utils.assign({}, mocharc, options || {}); @@ -136,6 +137,10 @@ function Mocha(options) { this[opt](); } }, this); + + this._runner = options.parallel + ? require('./buffered-runner') + : exports.Runner; } /** @@ -824,14 +829,14 @@ Object.defineProperty(Mocha.prototype, 'version', { * // exit with non-zero status if there were test failures * mocha.run(failures => process.exitCode = failures ? 
1 : 0); */ -Mocha.prototype.run = function(fn) { +Mocha.prototype.run = function(fn, runOptions) { if (this.files.length && !this.loadAsync) { this.loadFiles(); } var suite = this.suite; var options = this.options; options.files = this.files; - var runner = new exports.Runner(suite, options.delay); + var runner = new this._runner(suite, options.delay); createStatsCollector(runner); var reporter = new this._reporter(runner, options); runner.checkLeaks = options.checkLeaks === true; @@ -864,5 +869,5 @@ Mocha.prototype.run = function(fn) { } } - return runner.run(done); + return runner.run(done, runOptions); }; diff --git a/lib/reporters/buffered.js b/lib/reporters/buffered.js new file mode 100644 index 0000000000..b13df9fa20 --- /dev/null +++ b/lib/reporters/buffered.js @@ -0,0 +1,114 @@ +'use strict'; +/** + * @module Buffered + */ +/** + * Module dependencies. + */ + +const { + EVENT_SUITE_BEGIN, + EVENT_SUITE_END, + EVENT_TEST_FAIL, + EVENT_TEST_PASS, + EVENT_TEST_PENDING +} = require('../runner').constants; + +/** + * Creates a {@link BufferedEvent} from a {@link Suite}. + * @param {string} evt - Event name + * @param {Suite} suite - Suite object + * @returns {BufferedEvent} + */ +const serializeSuite = (evt, suite) => ({ + name: evt, + data: {root: suite.root, title: suite.title} +}); + +/** + * Creates a {@link BufferedEvent} from a {@link Test}. + * @param {string} evt - Event name + * @param {Test} test - Test object + * @param {any} err - Error, if applicable + */ +const serializeTest = (evt, test, [err]) => { + const obj = { + title: test.title, + duration: test.duration, + err: test.err, + __fullTitle: test.fullTitle(), + __slow: test.slow(), + __titlePath: test.titlePath() + }; + if (err) { + obj.err = + test.err && err instanceof Error + ? { + multiple: [...(test.err.multiple || []), err] + } + : err; + } + return { + name: evt, + data: obj + }; +}; + +/** + * The `Buffered` reporter is for use by parallel runs. Instead of outputting + * to `STDOUT`, etc., it retains a list of events it receives and hands these + * off to the callback passed into {@link Mocha#run}. That callback will then + * return the data to the main process. + */ +class Buffered { + /** + * Listens for {@link Runner} events and retains them in an `events` instance prop. + * @param {Runner} runner + */ + constructor(runner) { + /** + * Retained list of events emitted from the {@link Runner} instance. + * @type {BufferedEvent[]} + */ + const events = (this.events = []); + + runner + .on(EVENT_SUITE_BEGIN, suite => { + events.push(serializeSuite(EVENT_SUITE_BEGIN, suite)); + }) + .on(EVENT_SUITE_END, suite => { + events.push(serializeSuite(EVENT_SUITE_END, suite)); + }) + .on(EVENT_TEST_PENDING, test => { + events.push(serializeTest(EVENT_TEST_PENDING, test)); + }) + .on(EVENT_TEST_FAIL, (test, err) => { + events.push(serializeTest(EVENT_TEST_FAIL, test, err)); + }) + .on(EVENT_TEST_PASS, test => { + events.push(serializeTest(EVENT_TEST_PASS, test)); + }); + } + + /** + * Calls the {@link Mocha#run} callback (`callback`) with the test failure + * count and the array of {@link BufferedEvent} objects. Resets the array. + * @param {number} failures - Number of failed tests + * @param {Function} callback - The callback passed to {@link Mocha#run}. + */ + done(failures, callback) { + callback(failures, [...this.events]); + this.events = []; + } +} + +/** + * Serializable event data from a `Runner`. 
Keys of the `data` property + * beginning with `__` will be converted into a function which returns the value + * upon deserialization. + * @typedef {Object} BufferedEvent + * @property {string} name - Event name + * @property {object} data - Event parameters + */ + +module.exports = Buffered; diff --git a/lib/worker.js b/lib/worker.js new file mode 100644 index 0000000000..84d80e9c31 --- /dev/null +++ b/lib/worker.js @@ -0,0 +1,39 @@ +'use strict'; + +const {expose} = require('threads/worker'); +const Mocha = require('./mocha'); +const {handleRequires, validatePlugin} = require('./cli/run-helpers'); + +let bootstrapped = false; + +/** + * Runs a single test file in a worker thread. + * @param {string} file - Filepath of test file + * @param {Options} argv - Parsed command-line options object + * @returns {Promise<[number, BufferedEvent[]]>} A tuple of failures and + * serializable event data + */ +async function run(file, argv) { + // the buffered reporter retains its events; these events are returned + // from this function back to the main process. + argv.reporter = require.resolve('./reporters/buffered'); + // if these were set, it would cause infinite recursion by spawning another worker + delete argv.parallel; + delete argv.jobs; + if (!bootstrapped) { + // setup requires and ui, but only do this once--we will reuse this worker! + handleRequires(argv.require); + validatePlugin(argv, 'ui', Mocha.interfaces); + bootstrapped = true; + } + const mocha = new Mocha(argv); + mocha.files = [file]; + await mocha.loadFilesAsync(); + return new Promise(resolve => { + mocha.run((failures, events) => { + resolve([failures, events]); + }); + }); +} + +expose(run); diff --git a/package.json b/package.json index d2a133a380..24be5c2067 100644 --- a/package.json +++ b/package.json @@ -60,6 +60,7 @@ "object.assign": "4.1.0", "strip-json-comments": "3.0.1", "supports-color": "7.1.0", + "threads": "^1.3.0", "which": "2.0.2", "wide-align": "1.1.3", "yargs": "13.3.2", From 0a62f3f0db8d41b422f21d80854374043f46e11b Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 12 Mar 2020 14:17:31 -0700 Subject: [PATCH 02/92] somewhat reasonable re-implementation using child processes --- .eslintrc.yml | 1 + .mocharc.yml | 4 +- lib/buffered-runner.js | 87 +++++++++++++------- lib/cli/run.js | 5 +- lib/hook.js | 17 ++++ lib/reporters/base.js | 11 --- lib/reporters/buffered.js | 108 +++++++++++-------------- lib/runner.js | 8 ++ lib/serializer.js | 144 +++++++++++++++++++++++++++++++++ lib/suite.js | 14 ++++ lib/test.js | 24 ++++++ lib/worker.js | 40 +++++++-- package.json | 2 +- test/integration/diffs.spec.js | 2 +- test/integration/helpers.js | 17 +++- test/unit/hook-timeout.spec.js | 3 +- test/unit/throw.spec.js | 1 + 17 files changed, 374 insertions(+), 114 deletions(-) create mode 100644 lib/serializer.js diff --git a/.eslintrc.yml b/.eslintrc.yml index 883d122818..a90505535e 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -30,6 +30,7 @@ overrides: - 'lib/buffered-runner.js' - 'lib/worker.js' - 'lib/reporters/buffered.js' + - 'lib/serializer.js' parserOptions: ecmaVersion: 2018 env: diff --git a/.mocharc.yml b/.mocharc.yml index fc4c97339c..4a05397bee 100644 --- a/.mocharc.yml +++ b/.mocharc.yml @@ -4,4 +4,6 @@ global: - okGlobalA,okGlobalB - okGlobalC - callback* -timeout: 300 +timeout: 500 +parallel: true +# bail: true diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 7a01b90676..775fc6c2c0 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -1,9 +1,11 @@ 'use strict'; 
+const os = require('os'); const Runner = require('./runner'); const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; -const {spawn, Pool, Worker} = require('threads'); const debug = require('debug')('mocha:buffered-runner'); +const workerpool = require('workerpool'); +const {deserializeMessage} = require('./serializer'); /** * This `Runner` delegates tests runs to worker threads. Does not execute any @@ -32,42 +34,73 @@ class BufferedRunner extends Runner { * @returns {Promise} */ async run(callback, {files, opts}) { - const pool = Pool(() => spawn(new Worker('./worker.js')), opts.jobs); + const jobs = opts.jobs || os.cpus().length - 1; + debug('starting pool with %d max workers', jobs); + const pool = workerpool.pool(require.resolve('./worker.js'), { + workerType: 'process', + maxWorkers: jobs + }); let exitCode = 0; + let aborted = false; this.emit(EVENT_RUN_BEGIN); - files.forEach(file => { - debug('enqueueing test file %s', file); - pool.queue(async run => { - const [failures, events] = await run(file, opts); - debug( - 'completed run of file %s; %d failures / %d events', - file, - failures, - events.length - ); - exitCode += failures; // can this be non-numeric? - events.forEach(({name, data}) => { - Object.keys(data).forEach(key => { - if (key.startsWith('__')) { - data[key.slice(2)] = () => data[key]; + const poolProxy = await pool.proxy(); + // const tasks = new Set( + const results = await Promise.allSettled( + files.map(async file => { + debug('enqueueing test file %s', file); + try { + const {failures, events} = deserializeMessage( + await poolProxy.run(file, opts) + ); + debug( + 'completed run of file %s; %d failures / %d events', + file, + failures, + events.length + ); + exitCode += failures; // can this be non-numeric? + let event = events.shift(); + let shouldAbort = false; + while (event) { + this.emit(event.eventName, event.data, event.error); + if (failures && event.data._bail) { + debug('terminating pool due to "bail" flag'); + shouldAbort = true; } - }); - // maybe we should just expect `err` separately from the worker. - if (data.err) { - this.emit(name, data, data.err); - } else { - this.emit(name, data); + event = events.shift(); } - }); - }); - }); + if (shouldAbort) { + aborted = true; + await pool.terminate(true); + } + } catch (err) { + if (!aborted) { + debug('terminating pool due to uncaught exception'); + await pool.terminate(true); + this.uncaught(err); + } + } finally { + debug('done running file %s', file); + } + }) + ); - await pool.settled(); // nonzero exit code if rejection? await pool.terminate(); + + const uncaughtExceptions = results.filter( + ({status}) => status === 'rejected' + ); + if (uncaughtExceptions.length) { + debug('found %d uncaught exceptions', uncaughtExceptions.length); + process.nextTick(() => { + throw uncaughtExceptions.shift(); + }); + } this.emit(EVENT_RUN_END); + debug('exiting with code %d', exitCode); callback(exitCode); } } diff --git a/lib/cli/run.js b/lib/cli/run.js index 1c52e7f634..84e358b3fe 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -154,10 +154,11 @@ exports.builder = yargs => jobs: { description: 'Number of concurrent jobs', implies: 'parallel', - defaultDescription: `CPU core count (${coreCount})`, + defaultDescription: `CPU core count (${coreCount}) -`, requiresArg: true, group: GROUPS.RULES, - coerce: value => (typeof value === 'undefined' ? coreCount : value) + coerce: value => + Number(typeof value === 'undefined' ? 
coreCount - 1 : value) }, 'list-interfaces': { conflicts: Array.from(ONE_AND_DONE_ARGS), diff --git a/lib/hook.js b/lib/hook.js index 71440d23d0..7936cda495 100644 --- a/lib/hook.js +++ b/lib/hook.js @@ -44,3 +44,20 @@ Hook.prototype.error = function(err) { this._error = err; }; + +Hook.prototype.serialize = function serialize() { + return Object.freeze({ + $$titlePath: this.titlePath(), + ctx: { + currentTest: { + title: this.ctx && this.ctx.currentTest && this.ctx.currentTest.title + } + }, + parent: { + root: this.parent.root, + title: this.parent.title + }, + title: this.title, + type: this.type + }); +}; diff --git a/lib/reporters/base.js b/lib/reporters/base.js index ea259445e3..50b5f63c96 100644 --- a/lib/reporters/base.js +++ b/lib/reporters/base.js @@ -12,7 +12,6 @@ var milliseconds = require('ms'); var utils = require('../utils'); var supportsColor = process.browser ? null : require('supports-color'); var constants = require('../runner').constants; -var EVENT_TEST_PASS = constants.EVENT_TEST_PASS; var EVENT_TEST_FAIL = constants.EVENT_TEST_FAIL; /** @@ -304,16 +303,6 @@ function Base(runner, options) { this.runner = runner; this.stats = runner.stats; // assigned so Reporters keep a closer reference - runner.on(EVENT_TEST_PASS, function(test) { - if (test.duration > test.slow()) { - test.speed = 'slow'; - } else if (test.duration > test.slow() / 2) { - test.speed = 'medium'; - } else { - test.speed = 'fast'; - } - }); - runner.on(EVENT_TEST_FAIL, function(test, err) { if (showDiff(err)) { stringifyDiffObjs(err); diff --git a/lib/reporters/buffered.js b/lib/reporters/buffered.js index b13df9fa20..8bf21da320 100644 --- a/lib/reporters/buffered.js +++ b/lib/reporters/buffered.js @@ -11,48 +11,19 @@ const { EVENT_SUITE_END, EVENT_TEST_FAIL, EVENT_TEST_PASS, - EVENT_TEST_PENDING + EVENT_TEST_PENDING, + EVENT_TEST_BEGIN, + EVENT_TEST_END, + EVENT_TEST_RETRY, + EVENT_DELAY_BEGIN, + EVENT_DELAY_END, + EVENT_HOOK_BEGIN, + EVENT_HOOK_END, + EVENT_RUN_END } = require('../runner').constants; - -/** - * Creates a {@link BufferedEvent} from a {@link Suite}. - * @param {string} evt - Event name - * @param {Suite} suite - Suite object - * @returns {BufferedEvent} - */ -const serializeSuite = (evt, suite) => ({ - name: evt, - data: {root: suite.root, title: suite.title} -}); - -/** - * Creates a {@link BufferedEvent} from a {@link Test}. - * @param {string} evt - Event name - * @param {Test} test - Test object - * @param {any} err - Error, if applicable - */ -const serializeTest = (evt, test, [err]) => { - const obj = { - title: test.title, - duration: test.duration, - err: test.err, - __fullTitle: test.fullTitle(), - __slow: test.slow(), - __titlePath: test.titlePath() - }; - if (err) { - obj.err = - test.err && err instanceof Error - ? { - multiple: [...(test.err.multiple || []), err] - } - : err; - } - return { - name: evt, - data: obj - }; -}; +const {SerializableEvent, SerializableWorkerResult} = require('../serializer'); +// const debug = require('debug')('mocha:reporters:buffered'); +const Base = require('./base'); /** * The `Buffered` reporter is for use by parallel runs. Instead of outputting @@ -60,34 +31,53 @@ const serializeTest = (evt, test, [err]) => { * off to the callback passed into {@link Mocha#run}. That callback will then * return the data to the main process. */ -class Buffered { +class Buffered extends Base { /** * Listens for {@link Runner} events and retains them in an `events` instance prop. 
* @param {Runner} runner */ - constructor(runner) { + constructor(runner, opts) { + super(runner, opts); + /** * Retained list of events emitted from the {@link Runner} instance. * @type {BufferedEvent[]} + * @memberOf Buffered */ const events = (this.events = []); - runner - .on(EVENT_SUITE_BEGIN, suite => { - events.push(serializeSuite(EVENT_SUITE_BEGIN, suite)); - }) - .on(EVENT_SUITE_END, suite => { - events.push(serializeSuite(EVENT_SUITE_END, suite)); - }) - .on(EVENT_TEST_PENDING, test => { - events.push(serializeTest(EVENT_TEST_PENDING, test)); - }) - .on(EVENT_TEST_FAIL, (test, err) => { - events.push(serializeTest(EVENT_TEST_FAIL, test, err)); - }) - .on(EVENT_TEST_PASS, test => { - events.push(serializeTest(EVENT_TEST_PASS, test)); + const listeners = new Map(); + const createListener = evt => { + const listener = (runnable, err) => { + events.push(SerializableEvent.create(evt, runnable, err)); + }; + listeners.set(evt, listener); + return listener; + }; + + [ + EVENT_DELAY_BEGIN, + EVENT_SUITE_BEGIN, + EVENT_SUITE_END, + EVENT_TEST_BEGIN, + EVENT_TEST_PENDING, + EVENT_TEST_FAIL, + EVENT_TEST_PASS, + EVENT_TEST_RETRY, + EVENT_TEST_END, + EVENT_DELAY_END, + EVENT_HOOK_BEGIN, + EVENT_HOOK_END + ].forEach(evt => { + runner.on(evt, createListener(evt)); + }); + + runner.once(EVENT_RUN_END, () => { + listeners.forEach((listener, evt) => { + runner.removeListener(evt, listener); }); + listeners.clear(); + }); } /** @@ -97,7 +87,7 @@ class Buffered { * @param {Function} callback - The callback passed to {@link Mocha#run}. */ done(failures, callback) { - callback(failures, [...this.events]); + callback(SerializableWorkerResult.create(failures, this.events)); this.events = []; } } diff --git a/lib/runner.js b/lib/runner.js index c60e562a81..9226fb291f 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -694,6 +694,14 @@ Runner.prototype.runTests = function(suite, fn) { } test.state = STATE_PASSED; + var slow = test.slow(); + if (test.duration > slow) { + test.speed = 'slow'; + } else if (test.duration > slow / 2) { + test.speed = 'medium'; + } else { + test.speed = 'fast'; + } self.emit(constants.EVENT_TEST_PASS, test); self.emit(constants.EVENT_TEST_END, test); self.hookUp(HOOK_TYPE_AFTER_EACH, next); diff --git a/lib/serializer.js b/lib/serializer.js new file mode 100644 index 0000000000..f9058511a1 --- /dev/null +++ b/lib/serializer.js @@ -0,0 +1,144 @@ +'use strict'; + +// const debug = require('debug')('mocha:serializer'); + +class SerializableWorkerResult { + constructor(failures, events) { + this.failures = failures; + this.events = events; + this.__type = 'SerializableWorkerResult'; + } + + static create(...args) { + return new SerializableWorkerResult(...args); + } + + serialize() { + this.events.forEach(event => { + event.serialize(); + }); + return Object.freeze(this); + } + + static deserialize(obj) { + obj.events.forEach(SerializableEvent.deserialize); + return obj; + } +} + +class SerializableEvent { + constructor(eventName, rawObject, error) { + this.eventName = eventName; + if (rawObject && typeof rawObject !== 'object') { + throw new Error( + `expected object, received [${typeof rawObject}]: ${rawObject}` + ); + } + this.error = error; + // we don't want this value sent via IPC. 
+ Object.defineProperty(this, 'rawObject', { + value: rawObject, + enumerable: false + }); + } + + static create(...args) { + return new SerializableEvent(...args); + } + + serialize() { + const createError = err => { + const _serializeError = ([value, key]) => { + if (value) { + if (typeof value[key] === 'object') { + const obj = value[key]; + Object.keys(obj) + .map(key => [obj[key], key]) + .forEach(_serializeError); + } else if (typeof value[key] === 'function') { + delete value[key]; + } + } + }; + const error = { + message: err.message, + stack: err.stack, + __type: 'Error' + }; + + Object.keys(err) + .map(key => [err[key], key]) + .forEach(_serializeError); + return error; + }; + const obj = this.rawObject; + this.data = Object.create(null); + Object.assign( + this.data, + typeof obj.serialize === 'function' ? obj.serialize() : obj + ); + Object.keys(this.data).forEach(key => { + if (this.data[key] instanceof Error) { + this.data[key] = createError(this.data[key]); + } + }); + if (this.error) { + this.error = createError(this.error); + } + return Object.freeze(this); + } + + static deserialize(obj) { + const createError = value => { + const error = new Error(value.message); + error.stack = value.stack; + Object.assign(error, value); + return error; + }; + const _deserialize = ([object, key]) => { + const value = typeof key !== 'undefined' ? object[key] : object; + if (typeof key === 'string' && key.startsWith('$$')) { + const newKey = key.slice(2); + object[newKey] = () => value; + delete object[key]; + key = newKey; + } + if (Array.isArray(value)) { + value.forEach((_, idx) => { + _deserialize([value, idx]); + }); + } else if (value && typeof value === 'object') { + if (value.__type === 'Error') { + object[key] = createError(value); + } else { + Object.keys(value) + .map(key => [value, key]) + .forEach(_deserialize); + } + } + }; + + Object.keys(obj.data) + .map(key => [obj.data, key]) + .forEach(_deserialize); + if (obj.error) { + obj.error = createError(obj.error); + } + return obj; + } +} + +exports.serializeObject = function serializeObject(obj) { + return obj instanceof SerializableWorkerResult ? obj.serialize() : obj; +}; + +exports.deserializeMessage = function deserializeMessage(message) { + return message && + typeof message === 'object' && + message.__type === 'SerializableWorkerResult' + ? SerializableWorkerResult.deserialize(message) + : message; +}; + +exports.SerializableEvent = SerializableEvent; +exports.SerializableWorkerResult = SerializableWorkerResult; diff --git a/lib/suite.js b/lib/suite.js index 191d946b50..e6e8c24413 100644 --- a/lib/suite.js +++ b/lib/suite.js @@ -549,6 +549,20 @@ Suite.prototype.cleanReferences = function cleanReferences() { } }; +/** + * Returns an object suitable for IPC. + * Functions are represented by keys beginning with `$$`. + * @returns {Object} + */ +Suite.prototype.serialize = function serialize() { + return Object.freeze({ + _bail: this._bail, + $$fullTitle: this.fullTitle(), + root: this.root, + title: this.title + }); +}; + var constants = utils.defineConstants( /** * {@link Suite}-related constants. diff --git a/lib/test.js b/lib/test.js index 65122b260c..e87a245ade 100644 --- a/lib/test.js +++ b/lib/test.js @@ -62,3 +62,27 @@ Test.prototype.clone = function() { test.ctx = this.ctx; return test; }; + +/** + * Returns an object suitable for IPC. + * Functions are represented by keys beginnin g with `$$`. 
+ * @returns {Object} + */ +Test.prototype.serialize = function serialize() { + return Object.freeze({ + $$currentRetry: this._currentRetry, + $$fullTitle: this.fullTitle(), + $$retriedTest: this._retriedTest || null, + $$slow: this._slow, + $$titlePath: this.titlePath(), + body: this.body, + duration: this.duration, + err: this.err, + parent: { + $$fullTitle: this.parent.fullTitle() + }, + speed: this.speed, + title: this.title, + type: this.type + }); +}; diff --git a/lib/worker.js b/lib/worker.js index 84d80e9c31..f9696add4c 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -1,9 +1,10 @@ 'use strict'; -const {expose} = require('threads/worker'); +const workerpool = require('workerpool'); const Mocha = require('./mocha'); const {handleRequires, validatePlugin} = require('./cli/run-helpers'); - +const debug = require('debug')('mocha:worker'); +const {serializeObject} = require('./serializer'); let bootstrapped = false; /** @@ -14,6 +15,7 @@ let bootstrapped = false; * serializable event data */ async function run(file, argv) { + debug('running test file %s on process [%d]', file, process.pid); // the buffered reporter retains its events; these events are returned // from this function back to the main process. argv.reporter = require.resolve('./reporters/buffered'); @@ -25,15 +27,39 @@ async function run(file, argv) { handleRequires(argv.require); validatePlugin(argv, 'ui', Mocha.interfaces); bootstrapped = true; + debug('bootstrapped process [%d]', process.pid); } const mocha = new Mocha(argv); mocha.files = [file]; - await mocha.loadFilesAsync(); - return new Promise(resolve => { - mocha.run((failures, events) => { - resolve([failures, events]); + try { + await mocha.loadFilesAsync(); + } catch (err) { + debug( + 'process [%d] rejecting; could not load file %s: %s', + process.pid, + file, + err + ); + throw err; + } + return new Promise((resolve, reject) => { + function workerRejection(err) { + debug('process [%d] rejecting due to uncaught exception', process.pid); + reject(err); + } + process.once('uncaughtException', workerRejection); + mocha.run(result => { + process.removeAllListeners('uncaughtException'); + debug('process [%d] resolving', process.pid); + resolve(serializeObject(result)); }); }); } -expose(run); +workerpool.worker({ + run +}); + +process.on('beforeExit', () => { + debug('process [%d] exiting', process.pid); +}); diff --git a/package.json b/package.json index 24be5c2067..ae4b71eadc 100644 --- a/package.json +++ b/package.json @@ -60,9 +60,9 @@ "object.assign": "4.1.0", "strip-json-comments": "3.0.1", "supports-color": "7.1.0", - "threads": "^1.3.0", "which": "2.0.2", "wide-align": "1.1.3", + "workerpool": "^5.0.4", "yargs": "13.3.2", "yargs-parser": "13.1.2", "yargs-unparser": "1.6.0" diff --git a/test/integration/diffs.spec.js b/test/integration/diffs.spec.js index 44b30de1ae..ac9ad18d26 100644 --- a/test/integration/diffs.spec.js +++ b/test/integration/diffs.spec.js @@ -72,7 +72,7 @@ describe('diffs', function() { var diffs, expected; before(function(done) { - run('diffs/diffs.fixture.js', ['-C'], function(err, res) { + run('diffs/diffs.fixture.js', [], function(err, res) { if (err) { done(err); return; diff --git a/test/integration/helpers.js b/test/integration/helpers.js index 6cdf7e93cf..dc112206bb 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -4,7 +4,7 @@ var format = require('util').format; var spawn = require('cross-spawn').spawn; var path = require('path'); var Base = require('../../lib/reporters/base'); - +var debug = 
require('debug')('mocha:tests:integratin:helpers'); var DEFAULT_FIXTURE = resolveFixturePath('__default__'); var MOCHA_EXECUTABLE = require.resolve('../../bin/mocha'); var _MOCHA_EXECUTABLE = require.resolve('../../bin/_mocha'); @@ -47,7 +47,7 @@ module.exports = { args = args || []; invokeSubMocha( - args.concat(['-C', path]), + args.concat(path), function(err, res) { if (err) { return fn(err); @@ -182,7 +182,17 @@ function toJSONRunResult(result) { * @returns string[] */ function defaultArgs(args) { - return !args || !args.length ? ['--file', DEFAULT_FIXTURE] : args; + var newArgs = (!args || !args.length + ? ['--file', DEFAULT_FIXTURE] + : args + ).concat(['--no-color']); + if (!newArgs.some(arg => /--bail/.test(arg))) { + newArgs.push('--no-bail'); + } + if (!newArgs.some(arg => /--parallel/.test(arg))) { + newArgs.push('--no-parallel'); + } + return newArgs; } function invokeMocha(args, fn, opts) { @@ -248,6 +258,7 @@ function invokeSubMocha(args, fn, opts) { fn = args; args = []; } + debug(defaultArgs([_MOCHA_EXECUTABLE].concat(args))); return _spawnMochaWithListeners( defaultArgs([_MOCHA_EXECUTABLE].concat(args)), fn, diff --git a/test/unit/hook-timeout.spec.js b/test/unit/hook-timeout.spec.js index df3605518a..8c1b1f4735 100644 --- a/test/unit/hook-timeout.spec.js +++ b/test/unit/hook-timeout.spec.js @@ -1,8 +1,7 @@ 'use strict'; before(function(done) { - this.timeout(100); - setTimeout(done, 50); + setTimeout(done, 100); }); it('should work', function(done) { diff --git a/test/unit/throw.spec.js b/test/unit/throw.spec.js index 2dc3c8a759..1e02a3a085 100644 --- a/test/unit/throw.spec.js +++ b/test/unit/throw.spec.js @@ -25,6 +25,7 @@ describe('a test that throws', function() { }); afterEach(function() { + process.removeAllListeners('uncaughtException'); uncaughtHandlers.forEach(function(listener) { process.on('uncaughtException', listener); }); From db4b9a9fdacc2559e71651e029ddc3b43a72a583 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 12 Mar 2020 14:58:11 -0700 Subject: [PATCH 03/92] revert refactor of setting test.speed in runner increase timeout in `mocha.spec.js` "unit" test --- lib/reporters/base.js | 11 +++++++++++ lib/runner.js | 8 -------- test/node-unit/mocha.spec.js | 1 + 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/lib/reporters/base.js b/lib/reporters/base.js index 50b5f63c96..ea259445e3 100644 --- a/lib/reporters/base.js +++ b/lib/reporters/base.js @@ -12,6 +12,7 @@ var milliseconds = require('ms'); var utils = require('../utils'); var supportsColor = process.browser ? 
null : require('supports-color'); var constants = require('../runner').constants; +var EVENT_TEST_PASS = constants.EVENT_TEST_PASS; var EVENT_TEST_FAIL = constants.EVENT_TEST_FAIL; /** @@ -303,6 +304,16 @@ function Base(runner, options) { this.runner = runner; this.stats = runner.stats; // assigned so Reporters keep a closer reference + runner.on(EVENT_TEST_PASS, function(test) { + if (test.duration > test.slow()) { + test.speed = 'slow'; + } else if (test.duration > test.slow() / 2) { + test.speed = 'medium'; + } else { + test.speed = 'fast'; + } + }); + runner.on(EVENT_TEST_FAIL, function(test, err) { if (showDiff(err)) { stringifyDiffObjs(err); diff --git a/lib/runner.js b/lib/runner.js index 9226fb291f..c60e562a81 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -694,14 +694,6 @@ Runner.prototype.runTests = function(suite, fn) { } test.state = STATE_PASSED; - var slow = test.slow(); - if (test.duration > slow) { - test.speed = 'slow'; - } else if (test.duration > slow / 2) { - test.speed = 'medium'; - } else { - test.speed = 'fast'; - } self.emit(constants.EVENT_TEST_PASS, test); self.emit(constants.EVENT_TEST_END, test); self.hookUp(HOOK_TYPE_AFTER_EACH, next); diff --git a/test/node-unit/mocha.spec.js b/test/node-unit/mocha.spec.js index 314a012023..d665589dca 100644 --- a/test/node-unit/mocha.spec.js +++ b/test/node-unit/mocha.spec.js @@ -28,6 +28,7 @@ describe('Mocha', function() { describe('#loadFiles', function() { it('should load all files from the files array', function() { + this.timeout(1000); const mocha = new Mocha(opts); testFiles.forEach(mocha.addFile, mocha); From 7eac21a08b63bfe96c1e848fe6630117ed3133f7 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 12 Mar 2020 15:08:28 -0700 Subject: [PATCH 04/92] pull in promise.allsettled shim --- lib/buffered-runner.js | 3 ++- package.json | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 775fc6c2c0..8ed3008294 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -1,5 +1,6 @@ 'use strict'; +const allSettled = require('promise.allsettled'); const os = require('os'); const Runner = require('./runner'); const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; @@ -48,7 +49,7 @@ class BufferedRunner extends Runner { const poolProxy = await pool.proxy(); // const tasks = new Set( - const results = await Promise.allSettled( + const results = await allSettled( files.map(async file => { debug('enqueueing test file %s', file); try { diff --git a/package.json b/package.json index ae4b71eadc..388e578620 100644 --- a/package.json +++ b/package.json @@ -58,6 +58,7 @@ "minimatch": "3.0.4", "ms": "2.1.2", "object.assign": "4.1.0", + "promise.allsettled": "^1.0.2", "strip-json-comments": "3.0.1", "supports-color": "7.1.0", "which": "2.0.2", From d4a649e1e4e326dcae42e5602be39077637b7cdb Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 12 Mar 2020 15:32:08 -0700 Subject: [PATCH 05/92] remove concurrency stuff from bundle --- package-scripts.js | 2 +- package.json | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/package-scripts.js b/package-scripts.js index 5e0b1736ef..5dad561a40 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -23,7 +23,7 @@ function test(testName, mochaParams) { module.exports = { scripts: { build: { - script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 
'path' --ignore 'supports-color' -o mocha.js`, + script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' --ignore "./lib/buffered-runner.js" --ignore "./lib/serializer.js" --ignore "./lib/reporters/buffered.js" -o mocha.js`, description: 'Build browser bundle' }, lint: { diff --git a/package.json b/package.json index 388e578620..de9d9b3f31 100644 --- a/package.json +++ b/package.json @@ -151,7 +151,10 @@ "fs": false, "glob": false, "path": false, - "supports-color": false + "supports-color": false, + "./lib/serializer.js": false, + "./lib/reporters/buffered.js": false, + "./lib/buffered-reporter.js": false }, "prettier": { "singleQuote": true, From 61ab96e044ece65eff41aa648a11e61f65adeddb Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 13 Mar 2020 10:54:23 -0700 Subject: [PATCH 06/92] string tweak in package-scripts.js --- package-scripts.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package-scripts.js b/package-scripts.js index 5dad561a40..9c68e3a915 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -23,7 +23,7 @@ function test(testName, mochaParams) { module.exports = { scripts: { build: { - script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' --ignore "./lib/buffered-runner.js" --ignore "./lib/serializer.js" --ignore "./lib/reporters/buffered.js" -o mocha.js`, + script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' --ignore './lib/buffered-runner.js' --ignore './lib/serializer.js' --ignore './lib/reporters/buffered.js' -o mocha.js`, description: 'Build browser bundle' }, lint: { From fbf7d8ae2f5213e57416d5a3ee62308789f787ff Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 13 Mar 2020 11:29:57 -0700 Subject: [PATCH 07/92] browserify fixes --- .travis.yml | 15 ++++++++------- karma.conf.js | 4 ++++ package-scripts.js | 2 +- package.json | 3 ++- 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index a3874504dd..7fdae25520 100644 --- a/.travis.yml +++ b/.travis.yml @@ -84,10 +84,11 @@ jobs: notifications: email: false - urls: - # for gitter mochajs/mocha - - secure: fUrHenYJs+pTuLtgBRoYyrlyfVekxaIGmLWq7bhUUqBj/7p5eCkQFn13LlPht0/4WWZOiPBcdTN7tKnz3Ho7ATUJhAchvOWDUgL5gtTvOzeCHbPuCvHz/VLK6hMoPdbLA45M864NDLotfHvyh62WgQaVw9iPc80eb+umaDPrYiU= - # for gitter mochajs/contributors - - secure: rGMGYWBaZgEa9i997jJHKzjI8WxECqLi6BqsMhvstDq9EeTeXkZFVfz4r6G3Xugsk3tFwb/pDpiYo1OK36kA5arUJTCia51u4Wn+c7lHKcpef/vXztoyucvw6/jXdVm/FQz1jztYYbqdyAOWC2BV8gYvg5F8TpK05UGCe5R0bRA= - on_success: change - on_failure: always + webhooks: + urls: + # for gitter mochajs/mocha + - secure: fUrHenYJs+pTuLtgBRoYyrlyfVekxaIGmLWq7bhUUqBj/7p5eCkQFn13LlPht0/4WWZOiPBcdTN7tKnz3Ho7ATUJhAchvOWDUgL5gtTvOzeCHbPuCvHz/VLK6hMoPdbLA45M864NDLotfHvyh62WgQaVw9iPc80eb+umaDPrYiU= + # for gitter mochajs/contributors + - secure: rGMGYWBaZgEa9i997jJHKzjI8WxECqLi6BqsMhvstDq9EeTeXkZFVfz4r6G3Xugsk3tFwb/pDpiYo1OK36kA5arUJTCia51u4Wn+c7lHKcpef/vXztoyucvw6/jXdVm/FQz1jztYYbqdyAOWC2BV8gYvg5F8TpK05UGCe5R0bRA= + on_success: change + on_failure: always diff --git a/karma.conf.js 
b/karma.conf.js index 9337e3d3b8..86975e9d48 100644 --- a/karma.conf.js +++ b/karma.conf.js @@ -37,6 +37,10 @@ module.exports = config => { .ignore('./lib/esm-utils.js') .ignore('path') .ignore('supports-color') + .ignore('./lib/buffered-runner.js') + .ignore('./lib/reporters/buffered.js') + .ignore('./lib/serializer.js') + .ignore('./lib/worker.js') .on('bundled', (err, content) => { if (err) { throw err; diff --git a/package-scripts.js b/package-scripts.js index 9c68e3a915..fe4395aed4 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -23,7 +23,7 @@ function test(testName, mochaParams) { module.exports = { scripts: { build: { - script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' --ignore './lib/buffered-runner.js' --ignore './lib/serializer.js' --ignore './lib/reporters/buffered.js' -o mocha.js`, + script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' --ignore './lib/buffered-runner.js' --ignore './lib/serializer.js' --ignore './lib/reporters/buffered.js' --ignore './lib/worker.js' -o mocha.js`, description: 'Build browser bundle' }, lint: { diff --git a/package.json b/package.json index de9d9b3f31..5ef2f1da5b 100644 --- a/package.json +++ b/package.json @@ -154,7 +154,8 @@ "supports-color": false, "./lib/serializer.js": false, "./lib/reporters/buffered.js": false, - "./lib/buffered-reporter.js": false + "./lib/buffered-reporter.js": false, + "./lib/worker.js": false }, "prettier": { "singleQuote": true, From 485d6ab849ba2a8a838a126b78a7fbbadc4d1043 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 18 Mar 2020 15:04:15 -0700 Subject: [PATCH 08/92] add some tests - 100% coverage for `lib/serializer.js` - add a trivial integration test for `--parallel` - fix typo in integration test helper; output spawned command in a copy/pastable format - remove some unused code - rename `deserializeMessage` => `deserialize` - rename `serializeObject` => `serialize` - docstrings for `lib/serializer.js` - rewrite `SerializableEvent.serialize` as a loop instead of recursive function due to possibility of exceeding max stack trace; other refactors - do not freeze objects returned from various `Runnable`'s `serialize()` method, because `SerializableEvent#serialize` needs to mutate them. 
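The last point above is worth unpacking: `SerializableEvent#serialize` rewrites the object returned by a runnable's `serialize()` method in place -- deleting function-valued properties and converting `Error` values into plain objects -- so a frozen return value would reject those writes. A minimal illustration of the failure mode (this snippet is not part of the patch):

'use strict';
// Frozen objects have non-configurable properties, so the in-place rewriting
// performed by SerializableEvent#serialize cannot modify or delete them.
const serialized = Object.freeze({title: 'x', fn() {}});
delete serialized.fn; // TypeError in strict mode; a silent no-op otherwise
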
--- lib/buffered-runner.js | 6 +- lib/hook.js | 4 +- lib/serializer.js | 258 +++++++++++---- lib/suite.js | 4 +- lib/test.js | 4 +- lib/worker.js | 14 +- .../fixtures/options/parallel/a.fixture.js | 3 + .../fixtures/options/parallel/b.fixture.js | 3 + .../fixtures/options/parallel/c.fixture.js | 5 + .../fixtures/options/parallel/d.fixture.js | 7 + test/integration/helpers.js | 3 +- test/integration/options/parallel.spec.js | 24 ++ test/node-unit/serializer.spec.js | 296 ++++++++++++++++++ 13 files changed, 557 insertions(+), 74 deletions(-) create mode 100644 test/integration/fixtures/options/parallel/a.fixture.js create mode 100644 test/integration/fixtures/options/parallel/b.fixture.js create mode 100644 test/integration/fixtures/options/parallel/c.fixture.js create mode 100644 test/integration/fixtures/options/parallel/d.fixture.js create mode 100644 test/integration/options/parallel.spec.js create mode 100644 test/node-unit/serializer.spec.js diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 8ed3008294..031fc5c50c 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -6,7 +6,7 @@ const Runner = require('./runner'); const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; const debug = require('debug')('mocha:buffered-runner'); const workerpool = require('workerpool'); -const {deserializeMessage} = require('./serializer'); +const {deserialize} = require('./serializer'); /** * This `Runner` delegates tests runs to worker threads. Does not execute any @@ -48,12 +48,11 @@ class BufferedRunner extends Runner { this.emit(EVENT_RUN_BEGIN); const poolProxy = await pool.proxy(); - // const tasks = new Set( const results = await allSettled( files.map(async file => { debug('enqueueing test file %s', file); try { - const {failures, events} = deserializeMessage( + const {failures, events} = deserialize( await poolProxy.run(file, opts) ); debug( @@ -91,6 +90,7 @@ class BufferedRunner extends Runner { await pool.terminate(); + // XXX I'm not sure this is ever non-empty const uncaughtExceptions = results.filter( ({status}) => status === 'rejected' ); diff --git a/lib/hook.js b/lib/hook.js index 7936cda495..aa031e283f 100644 --- a/lib/hook.js +++ b/lib/hook.js @@ -46,7 +46,7 @@ Hook.prototype.error = function(err) { }; Hook.prototype.serialize = function serialize() { - return Object.freeze({ + return { $$titlePath: this.titlePath(), ctx: { currentTest: { @@ -59,5 +59,5 @@ Hook.prototype.serialize = function serialize() { }, title: this.title, type: this.type - }); + }; }; diff --git a/lib/serializer.js b/lib/serializer.js index f9058511a1..28a2bdfe59 100644 --- a/lib/serializer.js +++ b/lib/serializer.js @@ -1,12 +1,17 @@ 'use strict'; +const {type} = require('./utils'); +const {createInvalidArgumentTypeError} = require('./errors'); // const debug = require('debug')('mocha:serializer'); +const SERIALIZABLE_RESULT_NAME = 'SerializableWorkerResult'; +const SERIALIZABLE_TYPES = new Set(['object', 'array', 'function', 'error']); + class SerializableWorkerResult { constructor(failures, events) { this.failures = failures; this.events = events; - this.__type = 'SerializableWorkerResult'; + this.__type = SERIALIZABLE_RESULT_NAME; } static create(...args) { @@ -24,90 +29,198 @@ class SerializableWorkerResult { obj.events.forEach(SerializableEvent.deserialize); return obj; } + + /** + * Returns `true` if this is a {@link SerializableWorkerResult}, even if serialized + * (in other words, not an instance). 
+ * + * @param {*} value - A value to check + */ + static isSerializableWorkerResult(value) { + return ( + type(value) === 'object' && value.__type === SERIALIZABLE_RESULT_NAME + ); + } } +/** + * Represents an event, emitted by a {@link Runner}, which is to be transmitted + * over IPC. + * + * Due to the contents of the event data, it's not possible to send them verbatim. + * When received by the main process--and handled by reporters--these objects are + * expected to contain {@link Runnable} instances. This class provides facilities + * to perform the translation via serialization and deserialization. + */ class SerializableEvent { - constructor(eventName, rawObject, error) { + /** + * Constructs a `SerializableEvent`, throwing if we receive unexpected data. + * + * Practically, events emitted from `Runner` have a minumum of zero (0) arguments-- + * (for example, {@link Runnable.constants.EVENT_RUN_BEGIN}) and a maximum of two (2) + * (for example, {@link Runnable.constants.EVENT_TEST_FAIL}, where the second argument + * is an `Error`). The first argument, if present, is a {@link Runnable}. + * This constructor's arguments adhere to this convention. + * @param {string} eventName - A non-empty event name. + * @param {any} [originalValue] - Some data. Corresponds to extra arguments passed to `EventEmitter#emit`. + * @param {Error} [originalError] - An error, if there's an error. + * @throws If `eventName` is empty, or `originalValue` is a non-object. + */ + constructor(eventName, originalValue, originalError) { + if (!eventName) { + throw new Error('expected a non-empty `eventName` argument'); + } + /** + * The event name. + * @memberof SerializableEvent + */ this.eventName = eventName; - if (rawObject && typeof rawObject !== 'object') { + const originalValueType = type(originalValue); + if (originalValueType !== 'object' && originalValueType !== 'undefined') { throw new Error( - `expected object, received [${typeof rawObject}]: ${rawObject}` + `expected object, received [${originalValueType}]: ${originalValue}` ); } - this.error = error; - // we don't want this value sent via IPC. - Object.defineProperty(this, 'rawObject', { - value: rawObject, + /** + * An error, if present. + * @memberof SerializableEvent + */ + Object.defineProperty(this, 'originalError', { + value: originalError, + enumerable: false + }); + + /** + * The raw value. + * + * We don't want this value sent via IPC; making it non-enumerable will do that. + * + * @memberof SerializableEvent + */ + Object.defineProperty(this, 'originalValue', { + value: originalValue, enumerable: false }); } + /** + * In case you hated using `new` (I do). + * + * @param {...any} args - Args for {@link SerializableEvent#constructor}. + * @returns {SerializableEvent} A new `SerializableEvent` + */ static create(...args) { return new SerializableEvent(...args); } + /** + * Modifies this object *in place* (for theoretical memory consumption & performance + * reasons); serializes `SerializableEvent#originalValue` (placing the result in + * `SerializableEvent#data`) and `SerializableEvent#error`. Freezes this object. + * The result is an object that can be transmitted over IPC. 
+ */ serialize() { - const createError = err => { - const _serializeError = ([value, key]) => { - if (value) { - if (typeof value[key] === 'object') { - const obj = value[key]; - Object.keys(obj) - .map(key => [obj[key], key]) - .forEach(_serializeError); - } else if (typeof value[key] === 'function') { - delete value[key]; - } - } - }; - const error = { - message: err.message, - stack: err.stack, - __type: 'Error' - }; - - Object.keys(err) - .map(key => [err[key], key]) - .forEach(_serializeError); - return error; - }; - const obj = this.rawObject; - this.data = Object.create(null); - Object.assign( - this.data, - typeof obj.serialize === 'function' ? obj.serialize() : obj - ); - Object.keys(this.data).forEach(key => { - if (this.data[key] instanceof Error) { - this.data[key] = createError(this.data[key]); + // list of types within values that we will attempt to serialize + + // given a parent object and a key, inspect the value and decide whether + // to replace it, remove it, or add it to our `pairs` array to further process. + // this is recursion in loop form. + const _serialize = (parent, key) => { + let value = parent[key]; + switch (type(value)) { + case 'error': + // we need to reference the stack prop b/c it's lazily-loaded. + // `__type` is necessary for deserialization to create an `Error` later. + // fall through to the 'object' branch below to further process & remove + // any junk that an assertion lib may throw in there. + // `message` is apparently not enumerable, so we must handle it specifically. + value = Object.assign(Object.create(null), value, { + stack: value.stack, + message: value.message, + __type: 'Error' + }); + parent[key] = value; + // falls through + case 'object': + // by adding props to the `pairs` array, we will process it further + pairs.push( + ...Object.keys(value) + .filter(key => SERIALIZABLE_TYPES.has(type(value[key]))) + .map(key => [value, key]) + ); + break; + case 'function': + // we _may_ want to dig in to functions for some assertion libraries + // that might put a usable property on a function. + // for now, just zap it. + delete parent[key]; + break; + case 'array': + pairs.push( + ...value + .filter(value => SERIALIZABLE_TYPES.has(type(value))) + .map((value, index) => [value, index]) + ); + break; } + }; + + const result = Object.assign(Object.create(null), { + data: + type(this.originalValue) === 'object' && + type(this.originalValue.serialize) === 'function' + ? this.originalValue.serialize() + : this.originalValue, + error: this.originalError }); - if (this.error) { - this.error = createError(this.error); + + const pairs = Object.keys(result).map(key => [result, key]); + + let pair; + while ((pair = pairs.shift())) { + _serialize(...pair); } + + this.data = result.data; + this.error = result.error; + return Object.freeze(this); } + /** + * Deserialize value returned from a worker into something more useful. + * Does not return the same object. + * @todo - do this in a loop instead of with recursion (if necessary) + * @param {SerializedEvent} obj - Object returned from worker + * @returns {SerializedEvent} Deserialized result + */ static deserialize(obj) { const createError = value => { const error = new Error(value.message); error.stack = value.stack; Object.assign(error, value); + delete error.__type; return error; }; const _deserialize = ([object, key]) => { - const value = typeof key !== 'undefined' ? 
object[key] : object; - if (typeof key === 'string' && key.startsWith('$$')) { + if (key === '__proto__') { + delete object[key]; + return; + } + const value = type(key) !== 'undefined' ? object[key] : object; + // keys beginning with `$$` are converted into functions returning the value + // and renamed, stripping the `$$` prefix + if (type(key) === 'string' && key.startsWith('$$')) { const newKey = key.slice(2); object[newKey] = () => value; delete object[key]; key = newKey; } - if (Array.isArray(value)) { + if (type(value) === 'array') { value.forEach((_, idx) => { _deserialize([value, idx]); }); - } else if (value && typeof value === 'object') { + } else if (type(value) === 'object') { if (value.__type === 'Error') { object[key] = createError(value); } else { @@ -118,27 +231,60 @@ class SerializableEvent { } }; - Object.keys(obj.data) - .map(key => [obj.data, key]) - .forEach(_deserialize); + if (!obj) { + throw createInvalidArgumentTypeError('Expected value', obj); + } + + obj = Object.assign(Object.create(null), obj); + + if (obj.data) { + Object.keys(obj.data) + .map(key => [obj.data, key]) + .forEach(_deserialize); + } + if (obj.error) { obj.error = createError(obj.error); } + return obj; } } -exports.serializeObject = function serializeObject(obj) { - return obj instanceof SerializableWorkerResult ? obj.serialize() : obj; +/** + * "Serializes" a value for transmission over IPC as a message. + * + * If value is an object and has a `serialize()` method, call that method; otherwise return the object and hope for the best. + * + * @param {*} obj - A value to serialize + */ +exports.serialize = function serialize(value) { + return type(value) === 'object' && type(value.serialize) === 'function' + ? value.serialize() + : value; }; -exports.deserializeMessage = function deserializeMessage(message) { - return message && - typeof message === 'object' && - message.__type === 'SerializableWorkerResult' +/** + * "Deserializes" a "message" received over IPC. + * + * This could be expanded with other objects that need deserialization, + * but at present time we only care about {@link SerializableWorkerResult} objects. + * + * @param {*} message - A "message" to deserialize + */ +exports.deserialize = function deserialize(message) { + return SerializableWorkerResult.isSerializableWorkerResult(message) ? SerializableWorkerResult.deserialize(message) : message; }; exports.SerializableEvent = SerializableEvent; exports.SerializableWorkerResult = SerializableWorkerResult; + +/** + * The result of calling `SerializableEvent.serialize`, as received + * by the deserializer. 
+ * @typedef {Object} SerializedEvent + * @property {object?} data - Optional serialized data + * @property {object?} error - Optional serialized `Error` + */ diff --git a/lib/suite.js b/lib/suite.js index e6e8c24413..62030fd4b6 100644 --- a/lib/suite.js +++ b/lib/suite.js @@ -555,12 +555,12 @@ Suite.prototype.cleanReferences = function cleanReferences() { * @returns {Object} */ Suite.prototype.serialize = function serialize() { - return Object.freeze({ + return { _bail: this._bail, $$fullTitle: this.fullTitle(), root: this.root, title: this.title - }); + }; }; var constants = utils.defineConstants( diff --git a/lib/test.js b/lib/test.js index e87a245ade..391f613e73 100644 --- a/lib/test.js +++ b/lib/test.js @@ -69,7 +69,7 @@ Test.prototype.clone = function() { * @returns {Object} */ Test.prototype.serialize = function serialize() { - return Object.freeze({ + return { $$currentRetry: this._currentRetry, $$fullTitle: this.fullTitle(), $$retriedTest: this._retriedTest || null, @@ -84,5 +84,5 @@ Test.prototype.serialize = function serialize() { speed: this.speed, title: this.title, type: this.type - }); + }; }; diff --git a/lib/worker.js b/lib/worker.js index f9696add4c..60072a68fc 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -4,7 +4,7 @@ const workerpool = require('workerpool'); const Mocha = require('./mocha'); const {handleRequires, validatePlugin} = require('./cli/run-helpers'); const debug = require('debug')('mocha:worker'); -const {serializeObject} = require('./serializer'); +const {serialize} = require('./serializer'); let bootstrapped = false; /** @@ -42,16 +42,14 @@ async function run(file, argv) { ); throw err; } - return new Promise((resolve, reject) => { - function workerRejection(err) { - debug('process [%d] rejecting due to uncaught exception', process.pid); - reject(err); - } - process.once('uncaughtException', workerRejection); + return new Promise(resolve => { + // TODO: figure out exactly what the sad path looks like here. 
+ // will depend on allowUncaught + // rejection should only happen if an error is "unrecoverable" mocha.run(result => { process.removeAllListeners('uncaughtException'); debug('process [%d] resolving', process.pid); - resolve(serializeObject(result)); + resolve(serialize(result)); }); }); } diff --git a/test/integration/fixtures/options/parallel/a.fixture.js b/test/integration/fixtures/options/parallel/a.fixture.js new file mode 100644 index 0000000000..43f53bbda8 --- /dev/null +++ b/test/integration/fixtures/options/parallel/a.fixture.js @@ -0,0 +1,3 @@ +describe('a', function() { + it('should pass', function() {}); +}); diff --git a/test/integration/fixtures/options/parallel/b.fixture.js b/test/integration/fixtures/options/parallel/b.fixture.js new file mode 100644 index 0000000000..8e6437a56a --- /dev/null +++ b/test/integration/fixtures/options/parallel/b.fixture.js @@ -0,0 +1,3 @@ +describe('b', function() { + it('should be pending'); +}); diff --git a/test/integration/fixtures/options/parallel/c.fixture.js b/test/integration/fixtures/options/parallel/c.fixture.js new file mode 100644 index 0000000000..d06b6a3ee6 --- /dev/null +++ b/test/integration/fixtures/options/parallel/c.fixture.js @@ -0,0 +1,5 @@ +describe('c', function() { + it('should fail', function() { + throw new Error('failure'); + }); +}); diff --git a/test/integration/fixtures/options/parallel/d.fixture.js b/test/integration/fixtures/options/parallel/d.fixture.js new file mode 100644 index 0000000000..ee19d54594 --- /dev/null +++ b/test/integration/fixtures/options/parallel/d.fixture.js @@ -0,0 +1,7 @@ +describe('d', function() { + it('should pass, then fail', function() { + process.nextTick(function() { + throw new Error('uncaught!!'); + }); + }); +}); diff --git a/test/integration/helpers.js b/test/integration/helpers.js index dc112206bb..b84048c5a3 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -4,7 +4,7 @@ var format = require('util').format; var spawn = require('cross-spawn').spawn; var path = require('path'); var Base = require('../../lib/reporters/base'); -var debug = require('debug')('mocha:tests:integratin:helpers'); +var debug = require('debug')('mocha:tests:integration:helpers'); var DEFAULT_FIXTURE = resolveFixturePath('__default__'); var MOCHA_EXECUTABLE = require.resolve('../../bin/mocha'); var _MOCHA_EXECUTABLE = require.resolve('../../bin/_mocha'); @@ -287,6 +287,7 @@ function _spawnMochaWithListeners(args, fn, opts) { }, opts || {} ); + debug('spawning: %s', [process.execPath].concat(args).join(' ')); var mocha = spawn(process.execPath, args, opts); var listener = function(data) { output += data; diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js new file mode 100644 index 0000000000..5f407486b6 --- /dev/null +++ b/test/integration/options/parallel.spec.js @@ -0,0 +1,24 @@ +'use strict'; + +var path = require('path'); +var helpers = require('../helpers'); +var runMochaJSON = helpers.runMochaJSON; + +describe('--parallel', function() { + it('should not appear fundamentally different than without', function(done) { + runMochaJSON( + path.join('options', 'parallel', '*.fixture.js'), + ['--parallel'], + function(err, res) { + if (err) { + return done(err); + } + expect(res, 'to have failed') + .and('to have passed test count', 2) + .and('to have pending test count', 1) + .and('to have failed test count', 2); + done(); + } + ); + }); +}); diff --git a/test/node-unit/serializer.spec.js b/test/node-unit/serializer.spec.js new file 
mode 100644 index 0000000000..62567b31ec --- /dev/null +++ b/test/node-unit/serializer.spec.js @@ -0,0 +1,296 @@ +'use strict'; + +const {createSandbox} = require('sinon'); +const {SerializableEvent} = require('../../lib/serializer'); + +describe('SerializableEvent', function() { + let sandbox; + + beforeEach(function() { + sandbox = createSandbox(); + }); + + afterEach(function() { + sandbox.restore(); + }); + + describe('constructor', function() { + describe('when called without `eventName`', function() { + it('should throw', function() { + expect( + () => new SerializableEvent(), + 'to throw', + /expected a non-empty `eventName`/ + ); + }); + }); + + describe('when called with a non-object `rawObject`', function() { + it('should throw', function() { + expect( + () => new SerializableEvent('blub', 'glug'), + 'to throw', + /expected object, received \[string\]/ + ); + }); + }); + }); + + describe('instance method', function() { + describe('serialize', function() { + it('should mutate the instance in-place', function() { + const evt = SerializableEvent.create('foo'); + expect(evt.serialize(), 'to be', evt); + }); + + it('should freeze the instance', function() { + expect( + Object.isFrozen(SerializableEvent.create('foo').serialize()), + 'to be true' + ); + }); + + describe('when passed an object with a `serialize` method', function() { + it('should call the `serialize` method', function() { + const obj = { + serialize: sandbox.stub() + }; + SerializableEvent.create('some-event', obj).serialize(); + expect(obj.serialize, 'was called once'); + }); + }); + + describe('when passed an object containing a non-`serialize` method', function() { + it('should remove functions', function() { + const obj = { + func: () => {} + }; + + expect( + SerializableEvent.create('some-event', obj).serialize(), + 'to satisfy', + { + data: expect.it('not to have property', 'func') + } + ); + }); + }); + + describe('when passed an object containing an array', function() { + it('should serialize the array', function() { + const obj = { + list: [{herp: 'derp'}, {bing: 'bong'}] + }; + expect( + SerializableEvent.create('some-event', obj).serialize(), + 'to satisfy', + {data: {list: [{herp: 'derp'}, {bing: 'bong'}]}} + ); + }); + }); + + describe('when passed an error', function() { + it('should serialize the error', function() { + const obj = {}; + const err = new Error('monkeypants'); + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + + it('should retain own props', function() { + const obj = {}; + const err = new Error('monkeypants'); + err.code = 'MONKEY'; + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + code: 'MONKEY', + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + + it('should not retain not-own props', function() { + const obj = {}; + const err = new Error('monkeypants'); + // eslint-disable-next-line no-proto + err.__proto__.code = 'MONKEY'; + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + }); + + describe('when passed an object containing a top-level prop with an Error value', function() 
{ + it('should serialize the Error', function() { + const obj = { + monkeyError: new Error('pantsmonkey') + }; + const evt = SerializableEvent.create('some-event', obj); + expect(evt.serialize(), 'to satisfy', { + eventName: 'some-event', + data: { + monkeyError: { + message: 'pantsmonkey', + stack: /^Error: pantsmonkey/, + __type: 'Error' + } + } + }); + }); + }); + describe('when passed an object containing a nested prop with an Error value', function() { + it('should serialize the Error', function() { + const obj = { + nestedObj: { + monkeyError: new Error('pantsmonkey') + } + }; + const evt = SerializableEvent.create('some-event', obj); + expect(evt.serialize(), 'to satisfy', { + eventName: 'some-event', + data: { + nestedObj: { + monkeyError: { + message: 'pantsmonkey', + stack: /^Error: pantsmonkey/, + __type: 'Error' + } + } + } + }); + }); + }); + }); + }); + + describe('static method', function() { + describe('deserialize', function() { + describe('when passed a falsy parameter', function() { + it('should throw "invalid arg type" error', function() { + expect(SerializableEvent.deserialize, 'to throw', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + }); + + it('should return a new object w/ null prototype', function() { + const obj = {bob: 'bob'}; + expect(SerializableEvent.deserialize(obj), 'to satisfy', obj) + .and('not to equal', obj) + .and('not to have property', 'constructor'); + }); + + describe('when passed value contains `data` prop', function() { + it('should ignore __proto__', function() { + const obj = { + data: Object.create(null) + }; + // eslint-disable-next-line no-proto + obj.data.__proto__ = {peaches: 'prunes'}; + + const expected = Object.assign(Object.create(null), { + data: Object.create(null) + }); + expect(SerializableEvent.deserialize(obj), 'to equal', expected); + }); + + describe('when `data` prop contains a nested serialized Error prop', function() { + it('should create an Error instance from the nested serialized Error prop', function() { + const message = 'problems!'; + const stack = 'problem instructions'; + const code = 'EIEIO'; + const expected = Object.assign(Object.create(null), { + data: { + whoops: Object.assign(new Error(message), { + stack, + code + }) + } + }); + + expect( + SerializableEvent.deserialize({ + data: { + whoops: { + message, + stack, + code, + __type: 'Error' + } + } + }), + 'to equal', + expected + ); + }); + }); + }); + + describe('when passed value contains an `error` prop', function() { + it('should create an Error instance from the prop', function() { + const message = 'problems!'; + const stack = 'problem instructions'; + const code = 'EIEIO'; + const expected = Object.assign(Object.create(null), { + error: Object.assign(new Error(message), { + stack, + code + }) + }); + + expect( + SerializableEvent.deserialize({ + error: { + message, + stack, + code, + __type: 'Error' + } + }), + 'to equal', + expected + ); + }); + }); + }); + + describe('create', function() { + it('should instantiate a SerializableEvent', function() { + expect( + SerializableEvent.create('some-event'), + 'to be a', + SerializableEvent + ); + }); + }); + }); +}); From e604fa0657e2c240c6abd76caa1f4ee73ff2e650 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 23 Mar 2020 15:54:15 -0700 Subject: [PATCH 09/92] add a lot of unit test coverage around serialization/deserialization - add more comments/docstrings - some refactors --- lib/reporters/buffered.js | 2 +- lib/serializer.js | 316 +++++++++------ test/node-unit/serializer.spec.js | 642 
+++++++++++++++++++++--------- 3 files changed, 651 insertions(+), 309 deletions(-) diff --git a/lib/reporters/buffered.js b/lib/reporters/buffered.js index 8bf21da320..9be6d3df48 100644 --- a/lib/reporters/buffered.js +++ b/lib/reporters/buffered.js @@ -87,7 +87,7 @@ class Buffered extends Base { * @param {Function} callback - The callback passed to {@link Mocha#run}. */ done(failures, callback) { - callback(SerializableWorkerResult.create(failures, this.events)); + callback(SerializableWorkerResult.create(this.events, failures)); this.events = []; } } diff --git a/lib/serializer.js b/lib/serializer.js index 28a2bdfe59..d19cff32d0 100644 --- a/lib/serializer.js +++ b/lib/serializer.js @@ -7,17 +7,57 @@ const {createInvalidArgumentTypeError} = require('./errors'); const SERIALIZABLE_RESULT_NAME = 'SerializableWorkerResult'; const SERIALIZABLE_TYPES = new Set(['object', 'array', 'function', 'error']); +/** + * The serializable result of a test file run from a worker. + */ class SerializableWorkerResult { - constructor(failures, events) { + /** + * Creates instance props; of note, the `__type` prop. + * + * Note that the failure count is _redundant_ and could be derived from the + * list of events; but since we're already doing the work, might as well use + * it. + * @param {SerializableEvent[]} [events=[]] - Events to eventually serialize + * @param {number} [failures=0] - Failure count + */ + constructor(events = [], failures = 0) { + /** + * The number of failures in this run + * @type {number} + */ this.failures = failures; + /** + * All relevant events emitted from the {@link Runner}. + * @type {SerializableEvent[]} + */ this.events = events; - this.__type = SERIALIZABLE_RESULT_NAME; + + /** + * Symbol-like value needed to distinguish when attempting to deserialize + * this object (once it's been received over IPC). + * @type {Readonly<"SerializableWorkerResult">} + */ + Object.defineProperty(this, '__type', { + value: SERIALIZABLE_RESULT_NAME, + enumerable: true, + writable: false + }); } + /** + * Instantiates a new {@link SerializableWorkerResult}. + * @param {...any} args - Args to constructor + * @returns {SerilizableWorkerResult} + */ static create(...args) { return new SerializableWorkerResult(...args); } + /** + * Serializes each {@link SerializableEvent} in our `events` prop; + * makes this object read-only. + * @returns {Readonly} + */ serialize() { this.events.forEach(event => { event.serialize(); @@ -25,20 +65,30 @@ class SerializableWorkerResult { return Object.freeze(this); } + /** + * Deserializes a {@link SerializedWorkerResult} into something reporters can + * use; calls {@link SerializableEvent.deserialize} on each item in its + * `events` prop. + * @param {SerializedWorkerResult} obj + * @returns {SerializedWorkerResult} + */ static deserialize(obj) { - obj.events.forEach(SerializableEvent.deserialize); + obj.events.forEach(event => { + SerializableEvent.deserialize(event); + }); return obj; } /** - * Returns `true` if this is a {@link SerializableWorkerResult}, even if serialized - * (in other words, not an instance). - * + * Returns `true` if this is a {@link SerializedWorkerResult} or a + * {@link SerializableWorkerResult}. 
* @param {*} value - A value to check + * @returns {boolean} If true, it's deserializable - static isSerializableWorkerResult(value) { + static isSerializedWorkerResult(value) { return ( - type(value) === 'object' && value.__type === SERIALIZABLE_RESULT_NAME + value instanceof SerializableWorkerResult || + (type(value) === 'object' && value.__type === SERIALIZABLE_RESULT_NAME) ); } } @@ -47,28 +97,31 @@ class SerializableWorkerResult { * Represents an event, emitted by a {@link Runner}, which is to be transmitted * over IPC. * - * Due to the contents of the event data, it's not possible to send them verbatim. - * When received by the main process--and handled by reporters--these objects are - * expected to contain {@link Runnable} instances. This class provides facilities - * to perform the translation via serialization and deserialization. + * Due to the contents of the event data, it's not possible to send them + * verbatim. When received by the main process--and handled by reporters--these + * objects are expected to contain {@link Runnable} instances. This class + * provides facilities to perform the translation via serialization and + * deserialization. */ class SerializableEvent { /** * Constructs a `SerializableEvent`, throwing if we receive unexpected data. * - * Practically, events emitted from `Runner` have a minumum of zero (0) arguments-- - * (for example, {@link Runnable.constants.EVENT_RUN_BEGIN}) and a maximum of two (2) - * (for example, {@link Runnable.constants.EVENT_TEST_FAIL}, where the second argument - * is an `Error`). The first argument, if present, is a {@link Runnable}. - * This constructor's arguments adhere to this convention. + * Practically, events emitted from `Runner` have a minimum of zero (0) + * arguments-- (for example, {@link Runnable.constants.EVENT_RUN_BEGIN}) and a + * maximum of two (2) (for example, + * {@link Runnable.constants.EVENT_TEST_FAIL}, where the second argument is an + * `Error`). The first argument, if present, is a {@link Runnable}. This + * constructor's arguments adhere to this convention. * @param {string} eventName - A non-empty event name. - * @param {any} [originalValue] - Some data. Corresponds to extra arguments passed to `EventEmitter#emit`. + * @param {any} [originalValue] - Some data. Corresponds to extra arguments + * passed to `EventEmitter#emit`. * @param {Error} [originalError] - An error, if there's an error. * @throws If `eventName` is empty, or `originalValue` is a non-object. */ constructor(eventName, originalValue, originalError) { if (!eventName) { - throw new Error('expected a non-empty `eventName` argument'); + throw new Error('expected a non-empty `eventName` string argument'); } /** * The event name. @@ -114,63 +167,70 @@ class SerializableEvent { } /** - * Modifies this object *in place* (for theoretical memory consumption & performance - * reasons); serializes `SerializableEvent#originalValue` (placing the result in - * `SerializableEvent#data`) and `SerializableEvent#error`. Freezes this object. - * The result is an object that can be transmitted over IPC. + * Used internally by {@link SerializableEvent#serialize}.
+ * @ignore + * @param {[object, string][]} pairs - List of parent/key tuples to process; modified in-place + * @param {object} parent - Some parent object + * @param {string} key - Key to inspect */ - serialize() { - // list of types within values that we will attempt to serialize + static _serialize(pairs, parent, key) { + let value = parent[key]; + switch (type(value)) { + case 'error': + // we need to reference the stack prop b/c it's lazily-loaded. + // `__type` is necessary for deserialization to create an `Error` later. + // fall through to the 'object' branch below to further process & remove + // any junk that an assertion lib may throw in there. + // `message` is apparently not enumerable, so we must handle it specifically. + value = Object.assign(Object.create(null), value, { + stack: value.stack, + message: value.message, + __type: 'Error' + }); + parent[key] = value; + // falls through + case 'object': + // by adding props to the `pairs` array, we will process it further + pairs.push( + ...Object.keys(value) + .filter(key => SERIALIZABLE_TYPES.has(type(value[key]))) + .map(key => [value, key]) + ); + break; + case 'function': + // we _may_ want to dig in to functions for some assertion libraries + // that might put a usable property on a function. + // for now, just zap it. + delete parent[key]; + break; + case 'array': + pairs.push( + ...value + .filter(value => SERIALIZABLE_TYPES.has(type(value))) + .map((value, index) => [value, index]) + ); + break; + } + } + /** + * Modifies this object *in place* (for theoretical memory consumption & + * performance reasons); serializes `SerializableEvent#originalValue` (placing + * the result in `SerializableEvent#data`) and `SerializableEvent#error`. + * Freezes this object. The result is an object that can be transmitted over + * IPC. + */ + serialize() { // given a parent object and a key, inspect the value and decide whether // to replace it, remove it, or add it to our `pairs` array to further process. // this is recursion in loop form. - const _serialize = (parent, key) => { - let value = parent[key]; - switch (type(value)) { - case 'error': - // we need to reference the stack prop b/c it's lazily-loaded. - // `__type` is necessary for deserialization to create an `Error` later. - // fall through to the 'object' branch below to further process & remove - // any junk that an assertion lib may throw in there. - // `message` is apparently not enumerable, so we must handle it specifically. - value = Object.assign(Object.create(null), value, { - stack: value.stack, - message: value.message, - __type: 'Error' - }); - parent[key] = value; - // falls through - case 'object': - // by adding props to the `pairs` array, we will process it further - pairs.push( - ...Object.keys(value) - .filter(key => SERIALIZABLE_TYPES.has(type(value[key]))) - .map(key => [value, key]) - ); - break; - case 'function': - // we _may_ want to dig in to functions for some assertion libraries - // that might put a usable property on a function. - // for now, just zap it. - delete parent[key]; - break; - case 'array': - pairs.push( - ...value - .filter(value => SERIALIZABLE_TYPES.has(type(value))) - .map((value, index) => [value, index]) - ); - break; - } - }; - + const originalValue = this.originalValue; const result = Object.assign(Object.create(null), { data: - type(this.originalValue) === 'object' && - type(this.originalValue.serialize) === 'function' - ? 
this.originalValue.serialize() - : this.originalValue, + type(originalValue) === 'object' && + type(originalValue.serialize) === 'function' + ? originalValue.serialize() + : originalValue, error: this.originalError }); @@ -178,7 +238,7 @@ class SerializableEvent { let pair; while ((pair = pairs.shift())) { - _serialize(...pair); + SerializableEvent._serialize(pairs, ...pair); } this.data = result.data; @@ -187,6 +247,57 @@ class SerializableEvent { return Object.freeze(this); } + /** + * Used internally by {@link SerializableEvent.deserialize}; creates an `Error` + * from an `Error`-like (serialized) object + * @ignore + * @param {Object} value - An Error-like value + * @returns {Error} Real error + */ + static _deserializeError(value) { + const error = new Error(value.message); + error.stack = value.stack; + Object.assign(error, value); + delete error.__type; + return error; + } + + /** + * Used internally by {@link SerializableEvent.deserialize}; recursively + * deserializes an object in-place. + * @param {object|Array} parent - Some object or array + * @param {string|number} key - Some prop name or array index within `parent` + */ + static _deserializeObject(parent, key) { + if (key === '__proto__') { + delete parent[key]; + return; + } + const value = parent[key]; + // keys beginning with `$$` are converted into functions returning the value + // and renamed, stripping the `$$` prefix. + // functions defined this way cannot be array members! + if (type(key) === 'string' && key.startsWith('$$')) { + const newKey = key.slice(2); + parent[newKey] = () => value; + delete parent[key]; + key = newKey; + } + if (type(value) === 'array') { + value.forEach((_, idx) => { + SerializableEvent._deserializeObject(value, idx); + }); + } else if (type(value) === 'object') { + if (value.__type === 'Error') { + parent[key] = SerializableEvent._deserializeError(value); + } else { + Object.keys(value).forEach(key => { + SerializableEvent._deserializeObject(value, key); + }); + } + } + } + /** * Deserialize value returned from a worker into something more useful. * Does not return the same object. @@ -195,42 +306,6 @@ class SerializableEvent { * @returns {SerializedEvent} Deserialized result */ static deserialize(obj) { - const createError = value => { - const error = new Error(value.message); - error.stack = value.stack; - Object.assign(error, value); - delete error.__type; - return error; - }; - const _deserialize = ([object, key]) => { - if (key === '__proto__') { - delete object[key]; - return; - } - const value = type(key) !== 'undefined' ? 
object[key] : object; - // keys beginning with `$$` are converted into functions returning the value - // and renamed, stripping the `$$` prefix - if (type(key) === 'string' && key.startsWith('$$')) { - const newKey = key.slice(2); - object[newKey] = () => value; - delete object[key]; - key = newKey; - } - if (type(value) === 'array') { - value.forEach((_, idx) => { - _deserialize([value, idx]); - }); - } else if (type(value) === 'object') { - if (value.__type === 'Error') { - object[key] = createError(value); - } else { - Object.keys(value) - .map(key => [value, key]) - .forEach(_deserialize); - } - } - }; - if (!obj) { throw createInvalidArgumentTypeError('Expected value', obj); } @@ -238,13 +313,13 @@ class SerializableEvent { obj = Object.assign(Object.create(null), obj); if (obj.data) { - Object.keys(obj.data) - .map(key => [obj.data, key]) - .forEach(_deserialize); + Object.keys(obj.data).forEach(key => { + SerializableEvent._deserializeObject(obj.data, key); + }); } if (obj.error) { - obj.error = createError(obj.error); + obj.error = SerializableEvent._deserializeError(obj.error); } return obj; @@ -256,7 +331,7 @@ class SerializableEvent { * * If value is an object and has a `serialize()` method, call that method; otherwise return the object and hope for the best. * - * @param {*} obj - A value to serialize + * @param {*} [value] - A value to serialize */ exports.serialize = function serialize(value) { return type(value) === 'object' && type(value.serialize) === 'function' @@ -270,12 +345,12 @@ exports.serialize = function serialize(value) { * This could be expanded with other objects that need deserialization, * but at present time we only care about {@link SerializableWorkerResult} objects. * - * @param {*} message - A "message" to deserialize + * @param {*} [value] - A "message" to deserialize */ -exports.deserialize = function deserialize(message) { - return SerializableWorkerResult.isSerializableWorkerResult(message) - ? SerializableWorkerResult.deserialize(message) - : message; +exports.deserialize = function deserialize(value) { + return SerializableWorkerResult.isSerializedWorkerResult(value) + ? SerializableWorkerResult.deserialize(value) + : value; }; exports.SerializableEvent = SerializableEvent; @@ -288,3 +363,12 @@ exports.SerializableWorkerResult = SerializableWorkerResult; * @property {object?} data - Optional serialized data * @property {object?} error - Optional serialized `Error` */ + +/** + * The result of calling `SerializableWorkerResult.serialize` as received + * by the deserializer. 
+ * @typedef {Object} SerializedWorkerResult + * @property {number} failures - Number of failures + * @property {SerializedEvent[]} events - Serialized events + * @property {"SerializedWorkerResult"} __type - Symbol-like to denote the type of object this is + */ diff --git a/test/node-unit/serializer.spec.js b/test/node-unit/serializer.spec.js index 62567b31ec..56c8dfdad5 100644 --- a/test/node-unit/serializer.spec.js +++ b/test/node-unit/serializer.spec.js @@ -1,9 +1,14 @@ 'use strict'; const {createSandbox} = require('sinon'); -const {SerializableEvent} = require('../../lib/serializer'); +const { + serialize, + deserialize, + SerializableEvent, + SerializableWorkerResult +} = require('../../lib/serializer'); -describe('SerializableEvent', function() { +describe('serializer', function() { let sandbox; beforeEach(function() { @@ -14,238 +19,341 @@ describe('SerializableEvent', function() { sandbox.restore(); }); - describe('constructor', function() { - describe('when called without `eventName`', function() { - it('should throw', function() { - expect( - () => new SerializableEvent(), - 'to throw', - /expected a non-empty `eventName`/ - ); + describe('function', function() { + describe('serialize', function() { + describe('when passed a non-object value', function() { + it('should return the value', function() { + expect(serialize('knees & toes'), 'to be', 'knees & toes'); + }); }); - }); - describe('when called with a non-object `rawObject`', function() { - it('should throw', function() { - expect( - () => new SerializableEvent('blub', 'glug'), - 'to throw', - /expected object, received \[string\]/ - ); + describe('when passed an object value', function() { + describe('w/o a `serialize` method', function() { + it('should return the value', function() { + const obj = {}; + expect(serialize(obj), 'to be', obj); + }); + }); + + describe('having a `serialize` method', function() { + it('should return the result of the `serialize` method', function() { + const serializedObj = {foo: 'bar'}; + const obj = {serialize: sandbox.stub().returns(serializedObj)}; + expect(serialize(obj), 'to be', serializedObj); + }); + }); + }); + + describe('when not passed anything', function() { + it('should return `undefined`', function() { + expect(serialize(), 'to be undefined'); + }); }); }); - }); - describe('instance method', function() { - describe('serialize', function() { - it('should mutate the instance in-place', function() { - const evt = SerializableEvent.create('foo'); - expect(evt.serialize(), 'to be', evt); + describe('deserialize', function() { + describe('when passed nothing', function() { + it('should return `undefined`', function() { + expect(deserialize(), 'to be undefined'); + }); }); - it('should freeze the instance', function() { - expect( - Object.isFrozen(SerializableEvent.create('foo').serialize()), - 'to be true' - ); + describe('when passed a non-object value', function() { + it('should return the value', function() { + expect(deserialize(500), 'to be', 500); + }); }); - describe('when passed an object with a `serialize` method', function() { - it('should call the `serialize` method', function() { - const obj = { - serialize: sandbox.stub() - }; - SerializableEvent.create('some-event', obj).serialize(); - expect(obj.serialize, 'was called once'); + describe('when passed an object value which is not a SerializedWorkerResult', function() { + it('should return the value', function() { + const obj = {}; + expect(deserialize(obj), 'to be', obj); }); }); - describe('when passed an object 
containing a non-`serialize` method', function() { - it('should remove functions', function() { - const obj = { - func: () => {} - }; + describe('when passed a SerializedWorkerResult object', function() { + // note that SerializedWorkerResult is an interface (typedef), not a class. + it('should return the result of `SerializableWorkerResult.deserialize` called on the value', function() { + const obj = Object.assign({}, SerializableWorkerResult.create()); + sandbox + .stub(SerializableWorkerResult, 'deserialize') + .returns('butts'); + deserialize(obj); expect( - SerializableEvent.create('some-event', obj).serialize(), - 'to satisfy', + SerializableWorkerResult.deserialize, + 'to have a call satisfying', { - data: expect.it('not to have property', 'func') + args: [obj], + returned: 'butts' } ); }); }); + }); + }); - describe('when passed an object containing an array', function() { - it('should serialize the array', function() { - const obj = { - list: [{herp: 'derp'}, {bing: 'bong'}] - }; + describe('SerializableEvent', function() { + describe('constructor', function() { + describe('when called without `eventName`', function() { + it('should throw', function() { expect( - SerializableEvent.create('some-event', obj).serialize(), - 'to satisfy', - {data: {list: [{herp: 'derp'}, {bing: 'bong'}]}} + () => new SerializableEvent(), + 'to throw', + /expected a non-empty `eventName`/ ); }); }); - describe('when passed an error', function() { - it('should serialize the error', function() { - const obj = {}; - const err = new Error('monkeypants'); + describe('when called with a non-object `rawObject`', function() { + it('should throw', function() { expect( - SerializableEvent.create('some-event', obj, err).serialize(), - 'to satisfy', - { - eventName: 'some-event', - error: { - message: 'monkeypants', - stack: /^Error: monkeypants/, - __type: 'Error' - }, - data: obj - } + () => new SerializableEvent('blub', 'glug'), + 'to throw', + /expected object, received \[string\]/ ); }); + }); + }); - it('should retain own props', function() { - const obj = {}; - const err = new Error('monkeypants'); - err.code = 'MONKEY'; - expect( - SerializableEvent.create('some-event', obj, err).serialize(), - 'to satisfy', - { - eventName: 'some-event', - error: { - code: 'MONKEY', - message: 'monkeypants', - stack: /^Error: monkeypants/, - __type: 'Error' - }, - data: obj - } - ); + describe('instance method', function() { + describe('serialize', function() { + it('should mutate the instance in-place', function() { + const evt = SerializableEvent.create('foo'); + expect(evt.serialize(), 'to be', evt); }); - it('should not retain not-own props', function() { - const obj = {}; - const err = new Error('monkeypants'); - // eslint-disable-next-line no-proto - err.__proto__.code = 'MONKEY'; + it('should freeze the instance', function() { expect( - SerializableEvent.create('some-event', obj, err).serialize(), - 'to satisfy', - { - eventName: 'some-event', - error: { - message: 'monkeypants', - stack: /^Error: monkeypants/, - __type: 'Error' - }, - data: obj - } + Object.isFrozen(SerializableEvent.create('foo').serialize()), + 'to be true' ); }); - }); - describe('when passed an object containing a top-level prop with an Error value', function() { - it('should serialize the Error', function() { - const obj = { - monkeyError: new Error('pantsmonkey') - }; - const evt = SerializableEvent.create('some-event', obj); - expect(evt.serialize(), 'to satisfy', { - eventName: 'some-event', - data: { - monkeyError: { - message: 
'pantsmonkey', - stack: /^Error: pantsmonkey/, - __type: 'Error' + describe('when passed an object with a `serialize` method', function() { + it('should call the `serialize` method', function() { + const obj = { + serialize: sandbox.stub() + }; + SerializableEvent.create('some-event', obj).serialize(); + expect(obj.serialize, 'was called once'); + }); + }); + + describe('when passed an object containing a non-`serialize` method', function() { + it('should remove functions', function() { + const obj = { + func: () => {} + }; + + expect( + SerializableEvent.create('some-event', obj).serialize(), + 'to satisfy', + { + data: expect.it('not to have property', 'func') } - } + ); }); }); - }); - describe('when passed an object containing a nested prop with an Error value', function() { - it('should serialize the Error', function() { - const obj = { - nestedObj: { + + describe('when passed an object containing an array', function() { + it('should serialize the array', function() { + const obj = { + list: [{herp: 'derp'}, {bing: 'bong'}] + }; + expect( + SerializableEvent.create('some-event', obj).serialize(), + 'to satisfy', + {data: {list: [{herp: 'derp'}, {bing: 'bong'}]}} + ); + }); + }); + + describe('when passed an error', function() { + it('should serialize the error', function() { + const obj = {}; + const err = new Error('monkeypants'); + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + + it('should retain own props', function() { + const obj = {}; + const err = new Error('monkeypants'); + err.code = 'MONKEY'; + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + code: 'MONKEY', + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + + it('should not retain not-own props', function() { + const obj = {}; + const err = new Error('monkeypants'); + // eslint-disable-next-line no-proto + err.__proto__.code = 'MONKEY'; + expect( + SerializableEvent.create('some-event', obj, err).serialize(), + 'to satisfy', + { + eventName: 'some-event', + error: { + message: 'monkeypants', + stack: /^Error: monkeypants/, + __type: 'Error' + }, + data: obj + } + ); + }); + }); + + describe('when passed an object containing a top-level prop with an Error value', function() { + it('should serialize the Error', function() { + const obj = { monkeyError: new Error('pantsmonkey') - } - }; - const evt = SerializableEvent.create('some-event', obj); - expect(evt.serialize(), 'to satisfy', { - eventName: 'some-event', - data: { - nestedObj: { + }; + const evt = SerializableEvent.create('some-event', obj); + expect(evt.serialize(), 'to satisfy', { + eventName: 'some-event', + data: { monkeyError: { message: 'pantsmonkey', stack: /^Error: pantsmonkey/, __type: 'Error' } } - } + }); + }); + }); + describe('when passed an object containing a nested prop with an Error value', function() { + it('should serialize the Error', function() { + const obj = { + nestedObj: { + monkeyError: new Error('pantsmonkey') + } + }; + const evt = SerializableEvent.create('some-event', obj); + expect(evt.serialize(), 'to satisfy', { + eventName: 'some-event', + data: { + nestedObj: { + monkeyError: { + message: 'pantsmonkey', + stack: /^Error: pantsmonkey/, + __type: 'Error' + } + } + } + }); }); }); }); }); - }); - 
describe('static method', function() { - describe('deserialize', function() { - describe('when passed a falsy parameter', function() { - it('should throw "invalid arg type" error', function() { - expect(SerializableEvent.deserialize, 'to throw', { - code: 'ERR_MOCHA_INVALID_ARG_TYPE' + describe('static method', function() { + describe('deserialize', function() { + describe('when passed a falsy parameter', function() { + it('should throw "invalid arg type" error', function() { + expect(SerializableEvent.deserialize, 'to throw', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); }); }); - }); - it('should return a new object w/ null prototype', function() { - const obj = {bob: 'bob'}; - expect(SerializableEvent.deserialize(obj), 'to satisfy', obj) - .and('not to equal', obj) - .and('not to have property', 'constructor'); - }); + it('should return a new object w/ null prototype', function() { + const obj = {bob: 'bob'}; + expect(SerializableEvent.deserialize(obj), 'to satisfy', obj) + .and('not to equal', obj) + .and('not to have property', 'constructor'); + }); + + describe('when passed value contains `data` prop', function() { + it('should ignore __proto__', function() { + const obj = { + data: Object.create(null) + }; + // eslint-disable-next-line no-proto + obj.data.__proto__ = {peaches: 'prunes'}; - describe('when passed value contains `data` prop', function() { - it('should ignore __proto__', function() { - const obj = { - data: Object.create(null) - }; - // eslint-disable-next-line no-proto - obj.data.__proto__ = {peaches: 'prunes'}; + const expected = Object.assign(Object.create(null), { + data: Object.create(null) + }); + expect(SerializableEvent.deserialize(obj), 'to equal', expected); + }); + + describe('when `data` prop contains a nested serialized Error prop', function() { + it('should create an Error instance from the nested serialized Error prop', function() { + const message = 'problems!'; + const stack = 'problem instructions'; + const code = 'EIEIO'; + const expected = Object.assign(Object.create(null), { + data: { + whoops: Object.assign(new Error(message), { + stack, + code + }) + } + }); - const expected = Object.assign(Object.create(null), { - data: Object.create(null) + expect( + SerializableEvent.deserialize({ + data: { + whoops: { + message, + stack, + code, + __type: 'Error' + } + } + }), + 'to equal', + expected + ); + }); }); - expect(SerializableEvent.deserialize(obj), 'to equal', expected); }); - describe('when `data` prop contains a nested serialized Error prop', function() { - it('should create an Error instance from the nested serialized Error prop', function() { + describe('when passed value contains an `error` prop', function() { + it('should create an Error instance from the prop', function() { const message = 'problems!'; const stack = 'problem instructions'; const code = 'EIEIO'; const expected = Object.assign(Object.create(null), { - data: { - whoops: Object.assign(new Error(message), { - stack, - code - }) - } + error: Object.assign(new Error(message), { + stack, + code + }) }); expect( SerializableEvent.deserialize({ - data: { - whoops: { - message, - stack, - code, - __type: 'Error' - } + error: { + message, + stack, + code, + __type: 'Error' } }), 'to equal', @@ -253,42 +361,192 @@ describe('SerializableEvent', function() { ); }); }); + + describe('when passed value data contains a prop beginning with "$$"', function() { + let result; + + beforeEach(function() { + result = SerializableEvent.deserialize({data: {$$foo: 'bar'}}); + }); + it('should create a 
new prop having a function value', function() { + expect(result, 'to satisfy', { + data: { + foo: expect.it('to be a function') + } + }); + }); + + it('should create a new prop returning the original value', function() { + expect(result.data.foo(), 'to equal', 'bar'); + }); + + it('should remove the prop with the "$$" prefix', function() { + expect(result, 'not to have property', '$$foo'); + }); + }); + + describe('when the value data contains a prop with an array value', function() { + beforeEach(function() { + sandbox.spy(SerializableEvent, '_deserializeObject'); + }); + + it('should deserialize each prop', function() { + const obj = {data: {foo: [{bar: 'baz'}]}}; + SerializableEvent.deserialize(obj); + expect( + SerializableEvent._deserializeObject, + 'to have a call satisfying', + { + args: [obj.data.foo, 0] + } + ); + }); + }); }); - describe('when passed value contains an `error` prop', function() { - it('should create an Error instance from the prop', function() { - const message = 'problems!'; - const stack = 'problem instructions'; - const code = 'EIEIO'; - const expected = Object.assign(Object.create(null), { - error: Object.assign(new Error(message), { - stack, - code - }) + describe('create', function() { + it('should instantiate a SerializableEvent', function() { + expect( + SerializableEvent.create('some-event'), + 'to be a', + SerializableEvent + ); + }); + }); + }); + }); + + describe('SerializableWorkerResult', function() { + describe('static method', function() { + describe('create', function() { + it('should return a new SerializableWorkerResult instance', function() { + expect( + SerializableWorkerResult.create(), + 'to be a', + SerializableWorkerResult + ); + }); + }); + + describe('isSerializedWorkerResult', function() { + describe('when passed an instance', function() { + it('should return `true`', function() { + expect( + SerializableWorkerResult.isSerializedWorkerResult( + new SerializableWorkerResult() + ), + 'to be true' + ); + }); + }); + + describe('when passed an object with an appropriate `__type` prop', function() { + it('should return `true`', function() { + // this is the most likely use-case, as the object is transmitted over IPC + // and loses its prototype + const original = new SerializableWorkerResult(); + const clone = Object.assign({}, original); + expect( + SerializableWorkerResult.isSerializedWorkerResult(clone), + 'to be true' + ); + }); + }); + + describe('when passed an object without an appropriate `__type` prop', function() { + it('should return `false`', function() { + expect( + SerializableWorkerResult.isSerializedWorkerResult({ + mister: 'mister' + }), + 'to be false' + ); }); + }); + }); + describe('deserialize', function() { + beforeEach(function() { + sandbox.stub(SerializableEvent, 'deserialize'); + }); + + it('should call SerializableEvent#deserialize on each item in its `events` prop', function() { + const result = Object.assign( + {}, + SerializableWorkerResult.create([ + {eventName: 'foo'}, + {eventName: 'bar'} + ]) + ); + SerializableWorkerResult.deserialize(result); + expect(SerializableEvent.deserialize, 'to have calls satisfying', [ + {args: [{eventName: 'foo'}]}, + {args: [{eventName: 'bar'}]} + ]); + }); + + it('should return the deserialized value', function() { + const result = Object.assign( + {}, + SerializableWorkerResult.create([ + {eventName: 'foo'}, + {eventName: 'bar'} + ]) + ); expect( - SerializableEvent.deserialize({ - error: { - message, - stack, - code, - __type: 'Error' - } - }), + 
SerializableWorkerResult.deserialize(result), 'to equal', - expected + result ); }); }); }); - describe('create', function() { - it('should instantiate a SerializableEvent', function() { + describe('instance method', function() { + describe('serialize', function() { + it('should return a read-only value', function() { + expect( + Object.isFrozen(SerializableWorkerResult.create().serialize()), + 'to be true' + ); + }); + + it('should call `SerializableEvent#serialize` of each of its events', function() { + sandbox.spy(SerializableEvent.prototype, 'serialize'); + const events = [ + SerializableEvent.create('foo'), + SerializableEvent.create('bar') + ]; + SerializableWorkerResult.create(events).serialize(); + expect( + SerializableEvent.prototype.serialize, + 'to have calls satisfying', + [{thisValue: events[0]}, {thisValue: events[1]}] + ); + }); + }); + }); + describe('constructor', function() { + // the following two tests should be combined into one, but not sure how to express + // as a single assertion + + it('should add a `__type` prop', function() { expect( - SerializableEvent.create('some-event'), - 'to be a', - SerializableEvent + new SerializableWorkerResult(), + 'to have property', + '__type', + 'SerializableWorkerResult' + ); + }); + + it('should not allow writing to the `__type` prop', function() { + const result = new SerializableWorkerResult(); + expect( + () => { + result.__type = 'a bird'; + }, + 'to throw', + TypeError ); }); }); From d154141b0cc34c9091fc70b2358bef8434017242 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 23 Mar 2020 16:02:20 -0700 Subject: [PATCH 10/92] increase timeout for options tests Signed-off-by: Christopher Hiller --- test/node-unit/cli/options.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/node-unit/cli/options.spec.js b/test/node-unit/cli/options.spec.js index d60de8e268..56400971ac 100644 --- a/test/node-unit/cli/options.spec.js +++ b/test/node-unit/cli/options.spec.js @@ -58,7 +58,7 @@ describe('options', function() { describe('loadOptions()', function() { describe('when no parameter provided', function() { beforeEach(function() { - this.timeout(500); + this.timeout(1000); readFileSync = sandbox.stub(); readFileSync.onFirstCall().returns('{}'); findConfig = sandbox.stub().returns('/some/.mocharc.json'); From 76105e7dcfe6c556720dc4867265ff08d7b43371 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 24 Mar 2020 14:22:47 -0700 Subject: [PATCH 11/92] upgrade unexpected-eventemitter Signed-off-by: Christopher Hiller --- package-lock.json | 195 +++++++++++++++++++++++++++++++++------------- package.json | 2 +- 2 files changed, 143 insertions(+), 54 deletions(-) diff --git a/package-lock.json b/package-lock.json index a208758447..ae095c5af5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1039,6 +1039,17 @@ "es-abstract": "^1.17.0-next.1" } }, + "array.prototype.map": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.2.tgz", + "integrity": "sha512-Az3OYxgsa1g7xDYp86l0nnN4bcmuEITGe1rbdEBVkrqkzMgDcbdQ2R7r41pNzti+4NMces3H8gMmuioZUilLgw==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "es-array-method-boxes-properly": "^1.0.0", + "is-string": "^1.0.4" + } + }, "arraybuffer.slice": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", @@ -1826,6 +1837,16 @@ "resolved": 
"https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.0.0.tgz", "integrity": "sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==" }, + "bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dev": true, + "optional": true, + "requires": { + "file-uri-to-path": "1.0.0" + } + }, "bl": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.2.tgz", @@ -1851,7 +1872,7 @@ "bn.js": { "version": "4.11.8", "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz", - "integrity": "sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA==", + "integrity": "sha1-LN4J617jQfSEdGuwMJsyU7GxRC8=", "dev": true }, "body-parser": { @@ -2203,6 +2224,7 @@ "dev": true, "optional": true, "requires": { + "bindings": "^1.5.0", "nan": "^2.12.1", "node-pre-gyp": "*" }, @@ -2886,7 +2908,7 @@ }, "yargs": { "version": "6.4.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-6.4.0.tgz", + "resolved": "http://registry.npmjs.org/yargs/-/yargs-6.4.0.tgz", "integrity": "sha1-gW4ahm1VmMzzTlWW3c4i2S2kkNQ=", "dev": true, "requires": { @@ -2908,7 +2930,7 @@ }, "yargs-parser": { "version": "4.2.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-4.2.1.tgz", + "resolved": "http://registry.npmjs.org/yargs-parser/-/yargs-parser-4.2.1.tgz", "integrity": "sha1-KczqwNxPA8bIe0qfIX3RjJ90hxw=", "dev": true, "requires": { @@ -3553,7 +3575,7 @@ "cipher-base": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", - "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "integrity": "sha1-h2Dk7MJy9MNjUy+SbYdKriwTl94=", "dev": true, "requires": { "inherits": "^2.0.1", @@ -4272,7 +4294,7 @@ "createerror": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/createerror/-/createerror-1.3.0.tgz", - "integrity": "sha512-w9UZUtkaGd8MfS7eMG7Sa0lV5vCJghqQfiOnwNVrPhbZScUp5h0jwYoAF933MKlotlG1JAJOCCT3xU6r+SDKNw==", + "integrity": "sha1-xma9TNa5TjVBU5ZWnUZJ3QzbMxM=", "dev": true }, "cross-env": { @@ -5456,7 +5478,6 @@ "version": "1.17.5", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", - "dev": true, "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", @@ -5471,11 +5492,36 @@ "string.prototype.trimright": "^2.1.1" } }, + "es-array-method-boxes-properly": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", + "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==" + }, + "es-get-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", + "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", + "requires": { + "es-abstract": "^1.17.4", + "has-symbols": "^1.0.1", + "is-arguments": "^1.0.4", + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-string": "^1.0.5", + "isarray": "^2.0.5" + }, + "dependencies": { + "isarray": { + "version": "2.0.5", + "resolved": 
"https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + } + } + }, "es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, "requires": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -6137,7 +6183,7 @@ "evp_bytestokey": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", - "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", + "integrity": "sha1-f8vbGY3HGVlDLv4ThCaE4FJaywI=", "dev": true, "requires": { "md5.js": "^1.3.4", @@ -6577,6 +6623,13 @@ "integrity": "sha1-LdvqfHP/42No365J3DOMBYwritY=", "dev": true }, + "file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "dev": true, + "optional": true + }, "filename-reserved-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", @@ -6701,13 +6754,13 @@ "dependencies": { "colors": { "version": "0.6.2", - "resolved": "https://registry.npmjs.org/colors/-/colors-0.6.2.tgz", + "resolved": "http://registry.npmjs.org/colors/-/colors-0.6.2.tgz", "integrity": "sha1-JCP+ZnisDF2uiFLl0OW+CMmXq8w=", "dev": true }, "commander": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.1.0.tgz", + "resolved": "http://registry.npmjs.org/commander/-/commander-2.1.0.tgz", "integrity": "sha1-0SG7roYNmZKj1Re6lvVliOR8Z4E=", "dev": true } @@ -6962,7 +7015,7 @@ }, "strip-ansi": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "dev": true, "requires": { @@ -7175,7 +7228,7 @@ }, "got": { "version": "6.7.1", - "resolved": "https://registry.npmjs.org/got/-/got-6.7.1.tgz", + "resolved": "http://registry.npmjs.org/got/-/got-6.7.1.tgz", "integrity": "sha1-JAzQV4WpoY5WHcG0S0HHY+8ejbA=", "dev": true, "requires": { @@ -7297,7 +7350,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, "requires": { "function-bind": "^1.1.1" } @@ -7549,7 +7601,7 @@ "html-encoding-sniffer": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz", - "integrity": "sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw==", + "integrity": "sha1-5w2EuU2lOqN14R/jo1G+ZkLKRvg=", "dev": true, "requires": { "whatwg-encoding": "^1.0.1" @@ -8238,6 +8290,11 @@ "is-decimal": "^1.0.0" } }, + "is-arguments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", + "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==" + }, "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", @@ -8260,8 
+8317,7 @@ "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", - "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", - "dev": true + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==" }, "is-ci": { "version": "1.2.1", @@ -8315,8 +8371,7 @@ "is-date-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", - "dev": true + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" }, "is-decimal": { "version": "1.0.4", @@ -8419,6 +8474,11 @@ "integrity": "sha1-gV0USxTVac7L6tTVaTCX8Aqb9sU=", "dev": true }, + "is-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", + "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==" + }, "is-natural-number": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", @@ -8507,7 +8567,7 @@ "is-plain-object": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", - "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "integrity": "sha1-LBY7P6+xtgbZ0Xko8FwqHDjgdnc=", "dev": true, "requires": { "isobject": "^3.0.1" @@ -8529,7 +8589,6 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", - "dev": true, "requires": { "has": "^1.0.3" } @@ -8561,6 +8620,11 @@ "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", "dev": true }, + "is-set": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", + "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==" + }, "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", @@ -8570,8 +8634,7 @@ "is-string": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", - "dev": true + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==" }, "is-svg": { "version": "3.0.0", @@ -8586,7 +8649,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", - "dev": true, "requires": { "has-symbols": "^1.0.1" } @@ -8820,6 +8882,20 @@ "is-object": "^1.0.1" } }, + "iterate-iterator": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/iterate-iterator/-/iterate-iterator-1.0.1.tgz", + "integrity": "sha512-3Q6tudGN05kbkDQDI4CqjaBf4qf85w6W6GnuZDtUVYwKgtC1q8yxYX7CZed7N+tLzQqS6roujWvszf13T+n9aw==" + }, + "iterate-value": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/iterate-value/-/iterate-value-1.0.2.tgz", + "integrity": 
"sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==", + "requires": { + "es-get-iterator": "^1.0.2", + "iterate-iterator": "^1.0.1" + } + }, "javascript-stringify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.0.1.tgz", @@ -9731,7 +9807,7 @@ }, "strip-ansi": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", "dev": true, "requires": { @@ -9925,7 +10001,7 @@ }, "yargs": { "version": "6.6.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-6.6.0.tgz", + "resolved": "http://registry.npmjs.org/yargs/-/yargs-6.6.0.tgz", "integrity": "sha1-eC7CHvQDNF+DCoCMo9UTr1YGUgg=", "dev": true, "requires": { @@ -9946,7 +10022,7 @@ }, "yargs-parser": { "version": "4.2.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-4.2.1.tgz", + "resolved": "http://registry.npmjs.org/yargs-parser/-/yargs-parser-4.2.1.tgz", "integrity": "sha1-KczqwNxPA8bIe0qfIX3RjJ90hxw=", "dev": true, "requires": { @@ -10422,7 +10498,7 @@ }, "globby": { "version": "6.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-6.1.0.tgz", + "resolved": "http://registry.npmjs.org/globby/-/globby-6.1.0.tgz", "integrity": "sha1-9abXDoOV4hyFj7BInWTfAkJNUGw=", "dev": true, "requires": { @@ -10511,7 +10587,7 @@ "markdown-toc": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/markdown-toc/-/markdown-toc-1.2.0.tgz", - "integrity": "sha512-eOsq7EGd3asV0oBfmyqngeEIhrbkc7XVP63OwcJBIhH2EpG2PzFcbZdhy1jutXSlRBBVMNXHvMtSr5LAxSUvUg==", + "integrity": "sha1-RKFWBoREkDFK/ARESD+eexEiwzk=", "dev": true, "requires": { "concat-stream": "^1.5.2", @@ -10905,7 +10981,7 @@ "miller-rabin": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/miller-rabin/-/miller-rabin-4.0.1.tgz", - "integrity": "sha512-115fLhvZVqWwHPbClyntxEVfVDfl9DLLTuJvq3g2O/Oxi8AiNouAHvDSzHS0viUJc+V5vm3eq91Xwqn9dp4jRA==", + "integrity": "sha1-8IA1HIZbDcViqEYpZtqlNUPHik0=", "dev": true, "requires": { "bn.js": "^4.0.0", @@ -11193,7 +11269,7 @@ "no-case": { "version": "2.3.2", "resolved": "https://registry.npmjs.org/no-case/-/no-case-2.3.2.tgz", - "integrity": "sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ==", + "integrity": "sha1-YLgTOWvjmz8SiKTB7V0efSi0ZKw=", "dev": true, "requires": { "lower-case": "^1.1.1" @@ -11300,7 +11376,7 @@ }, "tty-browserify": { "version": "0.0.0", - "resolved": "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", + "resolved": "http://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz", "integrity": "sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=", "dev": true }, @@ -11944,8 +12020,7 @@ "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", - "dev": true + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==" }, "object-keys": { "version": "1.1.1", @@ -12049,7 +12124,7 @@ }, "opn": { "version": "5.3.0", - "resolved": "https://registry.npmjs.org/opn/-/opn-5.3.0.tgz", + "resolved": "http://registry.npmjs.org/opn/-/opn-5.3.0.tgz", "integrity": "sha512-bYJHo/LOmoTd+pfiYhfZDnf9zekVJrY+cnS2a5F2x+w5ppvTqObojTP7WiFG+kVZs9Inw+qQ/lw7TroWwhdd2g==", 
"dev": true, "requires": { @@ -12458,7 +12533,7 @@ }, "path-is-absolute": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "resolved": "http://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=" }, "path-is-inside": { @@ -12622,7 +12697,7 @@ "postcss": { "version": "5.2.18", "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", + "integrity": "sha1-ut+hSX1GJE9jkPWLMZgw2RB4U8U=", "dev": true, "requires": { "chalk": "^1.1.3", @@ -13469,7 +13544,7 @@ "postcss": { "version": "5.2.18", "resolved": "https://registry.npmjs.org/postcss/-/postcss-5.2.18.tgz", - "integrity": "sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==", + "integrity": "sha1-ut+hSX1GJE9jkPWLMZgw2RB4U8U=", "dev": true, "requires": { "chalk": "^1.1.3", @@ -13630,13 +13705,13 @@ }, "pretty-bytes": { "version": "4.0.2", - "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-4.0.2.tgz", + "resolved": "http://registry.npmjs.org/pretty-bytes/-/pretty-bytes-4.0.2.tgz", "integrity": "sha1-sr+C5zUNZcbDOqlaqlpPYyf2HNk=", "dev": true }, "pretty-ms": { "version": "0.2.2", - "resolved": "https://registry.npmjs.org/pretty-ms/-/pretty-ms-0.2.2.tgz", + "resolved": "http://registry.npmjs.org/pretty-ms/-/pretty-ms-0.2.2.tgz", "integrity": "sha1-2oeaaC/zOjcBEEbxPWJ/Z8c7hPY=", "dev": true, "requires": { @@ -13688,6 +13763,18 @@ "asap": "~2.0.3" } }, + "promise.allsettled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/promise.allsettled/-/promise.allsettled-1.0.2.tgz", + "integrity": "sha512-UpcYW5S1RaNKT6pd+s9jp9K9rlQge1UXKskec0j6Mmuq7UJCvlS2J2/s/yuPN8ehftf9HXMxWlKiPbGGUzpoRg==", + "requires": { + "array.prototype.map": "^1.0.1", + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "iterate-value": "^1.0.0" + } + }, "proto-list": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", @@ -13841,7 +13928,7 @@ }, "yargs": { "version": "3.10.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", + "resolved": "http://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", "integrity": "sha1-9+572FfdfB0tOMDnTvvWgdFDH9E=", "dev": true, "requires": { @@ -14570,7 +14657,7 @@ }, "rgba-regex": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/rgba-regex/-/rgba-regex-1.0.0.tgz", + "resolved": "http://registry.npmjs.org/rgba-regex/-/rgba-regex-1.0.0.tgz", "integrity": "sha1-QzdOLiyglosO8VI0YLfXMP8i7rM=", "dev": true }, @@ -14710,7 +14797,7 @@ "sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", - "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", + "integrity": "sha1-KBYjTiN4vdxOU1T6tcqold9xANk=", "dev": true }, "saxes": { @@ -16062,7 +16149,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.0.tgz", "integrity": "sha512-EEJnGqa/xNfIg05SxiPSqRS7S9qwDhYts1TSLR1BQfYUfPe1stofgGKvwERK9+9yf+PpfBMlpBaCHucXGPQfUA==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" @@ -16072,7 +16158,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", 
"integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16083,7 +16168,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16094,7 +16178,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.0.tgz", "integrity": "sha512-iCP8g01NFYiiBOnwG1Xc3WZLyoo+RuBymwIlWncShXDDJYWN6DbnM3odslBJdgCdRlq94B5s63NWAZlcn2CS4w==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" @@ -16190,7 +16273,7 @@ }, "strip-eof": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", + "resolved": "http://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", "dev": true }, @@ -16712,7 +16795,7 @@ }, "strip-ansi": { "version": "0.1.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz", + "resolved": "http://registry.npmjs.org/strip-ansi/-/strip-ansi-0.1.1.tgz", "integrity": "sha1-OeipjQRNFQZgq+SmgIrPcLt7yZE=", "dev": true } @@ -16749,7 +16832,7 @@ "tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "integrity": "sha1-bTQzWIl2jSGyvNoKonfO07G/rfk=", "dev": true, "requires": { "os-tmpdir": "~1.0.2" @@ -17092,9 +17175,9 @@ "dev": true }, "unexpected-eventemitter": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/unexpected-eventemitter/-/unexpected-eventemitter-1.1.3.tgz", - "integrity": "sha512-30MfVuCOCSEvUzNUErYZ3ZzLiPOgADcJsyxi+0Z5bhwgI/Yv4xHR/2v/YEe2alaEXDdkteCQ4gLBfa5/J2iTPA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unexpected-eventemitter/-/unexpected-eventemitter-2.0.0.tgz", + "integrity": "sha512-/dQtSU7l5/1jzx+ZD2GKufh3J+rupNoIQC0Wk6bQz2qLwHKjfY8GaCXSdykaSpQHRNDr/2LgeFnK0In5cfQwPA==", "dev": true }, "unexpected-sinon": { @@ -17666,6 +17749,7 @@ "dev": true, "optional": true, "requires": { + "bindings": "^1.5.0", "nan": "^2.12.1", "node-pre-gyp": "*" }, @@ -18308,7 +18392,7 @@ "webidl-conversions": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-4.0.2.tgz", - "integrity": "sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg==", + "integrity": "sha1-qFWYCx8LazWbodXZ+zmulB+qY60=", "dev": true }, "whatwg-encoding": { @@ -18416,6 +18500,11 @@ "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", "dev": true }, + "workerpool": { + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-5.0.4.tgz", + "integrity": "sha512-Sywova24Ow2NQ24JPB68bI89EdqMDjUXo4OpofK/QMD7C2ZVMloYBgQ5J3PChcBJHj2vspsmGx1/3nBKXtUkXQ==" + }, "wrap-ansi": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", diff --git a/package.json b/package.json index 5ef2f1da5b..c0283ec814 100644 --- a/package.json +++ b/package.json @@ -123,7 +123,7 @@ "through2": "^3.0.1", "to-vfile": "^6.1.0", "unexpected": "^11.13.0", - "unexpected-eventemitter": "^1.1.3", + 
"unexpected-eventemitter": "^2.0.0", "unexpected-sinon": "^10.11.2", "uslug": "^1.0.4", "watchify": "^3.11.1" From def48158c3d57358ccaf48f307b7168b2b14cc4f Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 24 Mar 2020 14:24:00 -0700 Subject: [PATCH 12/92] add some tests for BufferedRunner - refactor `BufferedRunner#run` to be a void fn - tweak some "bail" handling Signed-off-by: Christopher Hiller --- lib/buffered-runner.js | 157 ++++++++++++-------- test/node-unit/buffered-runner.spec.js | 195 +++++++++++++++++++++++++ 2 files changed, 288 insertions(+), 64 deletions(-) create mode 100644 test/node-unit/buffered-runner.spec.js diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 031fc5c50c..4921a3126c 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -7,6 +7,8 @@ const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; const debug = require('debug')('mocha:buffered-runner'); const workerpool = require('workerpool'); const {deserialize} = require('./serializer'); +const {type} = require('./utils'); +const WORKER_PATH = require.resolve('./worker.js'); /** * This `Runner` delegates tests runs to worker threads. Does not execute any @@ -32,77 +34,104 @@ class BufferedRunner extends Runner { * @param {Object} options * @param {string[]} options.files - List of test files * @param {Options} option.opts - Command-line options - * @returns {Promise} */ - async run(callback, {files, opts}) { - const jobs = opts.jobs || os.cpus().length - 1; - debug('starting pool with %d max workers', jobs); - const pool = workerpool.pool(require.resolve('./worker.js'), { - workerType: 'process', - maxWorkers: jobs - }); + run(callback, {files, opts}) { + // This function should _not_ return a `Promise`; its parent + // (`Runner#run`) is a void function, so this should be as well. + // However, we want to make use of `async`/`await`, so we use this + // IIFE. + (async () => { + try { + const jobs = opts.jobs || os.cpus().length - 1; + debug('starting pool with %d max workers', jobs); + const pool = workerpool.pool(WORKER_PATH, { + workerType: 'process', + maxWorkers: jobs + }); + const globalBail = type(opts.bail) === 'boolean' && opts.bail; - let exitCode = 0; - let aborted = false; + let exitCode = 0; + let aborted = false; - this.emit(EVENT_RUN_BEGIN); + this.emit(EVENT_RUN_BEGIN); - const poolProxy = await pool.proxy(); - const results = await allSettled( - files.map(async file => { - debug('enqueueing test file %s', file); - try { - const {failures, events} = deserialize( - await poolProxy.run(file, opts) - ); - debug( - 'completed run of file %s; %d failures / %d events', - file, - failures, - events.length - ); - exitCode += failures; // can this be non-numeric? - let event = events.shift(); - let shouldAbort = false; - while (event) { - this.emit(event.eventName, event.data, event.error); - if (failures && event.data._bail) { - debug('terminating pool due to "bail" flag'); - shouldAbort = true; + const poolProxy = await pool.proxy(); + const results = await allSettled( + files.map(async file => { + debug('enqueueing test file %s', file); + try { + const {failures, events} = deserialize( + await poolProxy.run(file, opts) + ); + debug( + 'completed run of file %s; %d failures / %d events', + file, + failures, + events.length + ); + exitCode += failures; // can this be non-numeric? 
+ let event = events.shift(); + let shouldAbort = false; + while (event) { + this.emit(event.eventName, event.data, event.error); + // prefer event.data._bail over globalBail, if the former is + // set, since it could be disabled on any given Runnable. + // if event.data is falsy, event.error will be as well. + if ( + event.error && + event.data && + (type(event.data._bail) === 'boolean' + ? event.data._bail + : globalBail) + ) { + debug('terminating pool due to "bail" flag'); + shouldAbort = true; + } + event = events.shift(); + } + if (failures && opts.bail) { + shouldAbort = true; + } + if (shouldAbort) { + aborted = true; + await pool.terminate(true); + } + } catch (err) { + if (!aborted) { + debug('terminating pool due to uncaught exception'); + await pool.terminate(true); + this.uncaught(err); + } + } finally { + debug('done running file %s', file); } - event = events.shift(); - } - if (shouldAbort) { - aborted = true; - await pool.terminate(true); - } - } catch (err) { - if (!aborted) { - debug('terminating pool due to uncaught exception'); - await pool.terminate(true); - this.uncaught(err); - } - } finally { - debug('done running file %s', file); - } - }) - ); + }) + ); - await pool.terminate(); + await pool.terminate(); - // XXX I'm not sure this is ever non-empty - const uncaughtExceptions = results.filter( - ({status}) => status === 'rejected' - ); - if (uncaughtExceptions.length) { - debug('found %d uncaught exceptions', uncaughtExceptions.length); - process.nextTick(() => { - throw uncaughtExceptions.shift(); - }); - } - this.emit(EVENT_RUN_END); - debug('exiting with code %d', exitCode); - callback(exitCode); + // XXX I'm not sure this is ever non-empty + const uncaughtExceptions = results.filter( + ({status}) => status === 'rejected' + ); + if (uncaughtExceptions.length) { + debug('found %d uncaught exceptions', uncaughtExceptions.length); + process.nextTick(() => { + throw uncaughtExceptions.shift(); + }); + } + this.emit(EVENT_RUN_END); + debug('exiting with code %d', exitCode); + callback(exitCode); + } catch (err) { + // any error caught here should be considered unrecoverable, + // since it will have come out of the worker pool. + console.log(err); + process.nextTick(() => { + throw err; + }); + } + })(); } } diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js new file mode 100644 index 0000000000..fe4e3a09af --- /dev/null +++ b/test/node-unit/buffered-runner.spec.js @@ -0,0 +1,195 @@ +'use strict'; + +const os = require('os'); +const { + EVENT_RUN_BEGIN, + EVENT_TEST_PASS, + EVENT_TEST_FAIL, + EVENT_SUITE_END +} = require('../../lib/runner').constants; +const rewiremock = require('rewiremock/node'); +const BUFFERED_RUNNER_PATH = require.resolve('../../lib/buffered-runner.js'); +const Suite = require('../../lib/suite'); +const {createSandbox} = require('sinon'); + +describe('buffered-runner', function() { + describe('BufferedRunner', function() { + let sandbox; + let pool; + let run; + let terminate; + let BufferedRunner; + let suite; + + beforeEach(function() { + sandbox = createSandbox(); + rewiremock.enable(); + + suite = new Suite('a root suite', {}, true); + + // tests will want to further define the behavior of these. 
+ run = sandbox.stub(); + terminate = sandbox.stub(); + + pool = sandbox.stub().returns({ + proxy: sandbox.stub().resolves({ + run + }), + terminate + }); + BufferedRunner = rewiremock.proxy(BUFFERED_RUNNER_PATH, () => ({ + workerpool: { + pool + } + })); + }); + + describe('instance method', function() { + describe('run', function() { + let runner; + + beforeEach(function() { + runner = new BufferedRunner(suite); + }); + + // the purpose of this is to ensure that--despite using `Promise`s + // internally--`BufferedRunner#run` does not return a `Promise`. + it('should return `undefined`', function(done) { + expect(runner.run(done, {files: [], opts: {}}), 'to be undefined'); + }); + + it('should emit `EVENT_RUN_BEGIN`', async function() { + return expect( + () => + new Promise(resolve => { + runner.run(resolve, {files: [], opts: {}}); + }), + 'to emit from', + runner, + EVENT_RUN_BEGIN + ); + }); + + describe('when not provided a max job count', function() { + it('should use a max job count based on CPU cores', function(done) { + runner.run( + () => { + expect(pool, 'to have a call satisfying', { + args: [ + expect.it('to be a', 'string'), + { + maxWorkers: os.cpus().length - 1 + } + ] + }); + done(); + }, + {files: [], opts: {}} + ); + }); + }); + + describe('when provided a max job count', function() { + it('should use the provided max count', function(done) { + runner.run( + () => { + expect(pool, 'to have a call satisfying', { + args: [ + expect.it('to be a', 'string'), + { + maxWorkers: 2 + } + ] + }); + done(); + }, + { + files: [], + opts: { + jobs: 2 + } + } + ); + }); + }); + + describe('when provided bail flag', function() { + describe('when no event contains an error', function() { + it('should not force-terminate', function(done) { + run.resolves({ + failures: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have a call satisfying', { + args: [] + }).and('was called once'); + done(); + }, + {files: ['some-file.js'], opts: {bail: true}} + ); + }); + }); + + describe('when an event contains an error', function() { + it('should force-terminate', function(done) { + const err = { + __type: 'Error', + message: 'oh no' + }; + Error.captureStackTrace(err); + run.resolves({ + failures: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: [true]}, + {args: []} + ]); + done(); + }, + {files: ['some-file.js'], opts: {bail: true}} + ); + }); + }); + }); + }); + }); + + afterEach(function() { + rewiremock.disable(); + }); + }); +}); From 20c660463f9090d00a427b9a41889c36318394dc Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 25 Mar 2020 15:31:51 -0700 Subject: [PATCH 13/92] add more buffered-runner tests - remove `promise.allsettled` shim as may not be needed - update `unexpected` to latest, which gives us a new assertion - tweak a unit test for `Runner` to leverage `unexpected-eventemitter` - tweak wallaby settings; increase test timeout --- .wallaby.js | 2 +- lib/buffered-runner.js | 40 +-- package.json | 3 +- test/node-unit/buffered-runner.spec.js | 366 +++++++++++++++++++++++-- test/node-unit/serializer.spec.js | 18 +- test/unit/runner.spec.js | 12 +- 6 files changed, 374 insertions(+), 67 
deletions(-) diff --git a/.wallaby.js b/.wallaby.js index a54f3576fc..4bf3f6b5dc 100644 --- a/.wallaby.js +++ b/.wallaby.js @@ -36,7 +36,7 @@ module.exports = () => { // running mocha instance is not the same as mocha under test, // running mocha is the project's source code mocha, mocha under test is instrumented version of the source code const runningMocha = wallaby.testFramework; - runningMocha.timeout(200); + runningMocha.timeout(1000); // to expose it/describe etc. on the mocha under test const MochaUnderTest = require('./'); const mochaUnderTest = new MochaUnderTest(); diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 4921a3126c..1c5fb6e82e 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -1,6 +1,5 @@ 'use strict'; -const allSettled = require('promise.allsettled'); const os = require('os'); const Runner = require('./runner'); const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; @@ -51,18 +50,19 @@ class BufferedRunner extends Runner { const globalBail = type(opts.bail) === 'boolean' && opts.bail; let exitCode = 0; - let aborted = false; + let didAbort = false; this.emit(EVENT_RUN_BEGIN); const poolProxy = await pool.proxy(); - const results = await allSettled( + await Promise.all( files.map(async file => { debug('enqueueing test file %s', file); try { - const {failures, events} = deserialize( - await poolProxy.run(file, opts) - ); + debug('calling run with: %O', [file, opts]); + const result = await poolProxy.run(file, opts); + debug(result); + const {failures, events} = deserialize(result); debug( 'completed run of file %s; %d failures / %d events', file, @@ -70,15 +70,15 @@ class BufferedRunner extends Runner { events.length ); exitCode += failures; // can this be non-numeric? - let event = events.shift(); let shouldAbort = false; + let event = events.shift(); while (event) { this.emit(event.eventName, event.data, event.error); // prefer event.data._bail over globalBail, if the former is // set, since it could be disabled on any given Runnable. // if event.data is falsy, event.error will be as well. if ( - event.error && + (failures || event.error) && event.data && (type(event.data._bail) === 'boolean' ? event.data._bail @@ -89,18 +89,18 @@ class BufferedRunner extends Runner { } event = events.shift(); } - if (failures && opts.bail) { - shouldAbort = true; - } if (shouldAbort) { - aborted = true; + didAbort = true; await pool.terminate(true); } } catch (err) { - if (!aborted) { + if (!didAbort) { debug('terminating pool due to uncaught exception'); + didAbort = true; await pool.terminate(true); this.uncaught(err); + } else { + debug('thread pool terminated; skipping file %s', file); } } finally { debug('done running file %s', file); @@ -110,23 +110,11 @@ class BufferedRunner extends Runner { await pool.terminate(); - // XXX I'm not sure this is ever non-empty - const uncaughtExceptions = results.filter( - ({status}) => status === 'rejected' - ); - if (uncaughtExceptions.length) { - debug('found %d uncaught exceptions', uncaughtExceptions.length); - process.nextTick(() => { - throw uncaughtExceptions.shift(); - }); - } this.emit(EVENT_RUN_END); debug('exiting with code %d', exitCode); callback(exitCode); } catch (err) { - // any error caught here should be considered unrecoverable, - // since it will have come out of the worker pool. 
- console.log(err); + // this is an "unknown" error; probably from 3p code process.nextTick(() => { throw err; }); diff --git a/package.json b/package.json index c0283ec814..e62fb599f1 100644 --- a/package.json +++ b/package.json @@ -58,7 +58,6 @@ "minimatch": "3.0.4", "ms": "2.1.2", "object.assign": "4.1.0", - "promise.allsettled": "^1.0.2", "strip-json-comments": "3.0.1", "supports-color": "7.1.0", "which": "2.0.2", @@ -122,7 +121,7 @@ "svgo": "^1.3.2", "through2": "^3.0.1", "to-vfile": "^6.1.0", - "unexpected": "^11.13.0", + "unexpected": "^11.14.0", "unexpected-eventemitter": "^2.0.0", "unexpected-sinon": "^10.11.2", "uslug": "^1.0.4", diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index fe4e3a09af..3702a431bc 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -70,6 +70,81 @@ describe('buffered-runner', function() { ); }); + describe('when a worker fails', function() { + it('should cleanly force-terminate the thread pool', function(done) { + const opts = {}; + run.withArgs('some-file.js', opts).rejects(new Error('whoops')); + run.withArgs('some-other-file.js', opts).resolves({ + failures: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + { + args: [true] + }, + {args: []} + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + opts + } + ); + }); + + it('should delegate to Runner#uncaught', function(done) { + const opts = {}; + sandbox.spy(runner, 'uncaught'); + const err = new Error('whoops'); + run.withArgs('some-file.js', opts).rejects(new Error('whoops')); + run.withArgs('some-other-file.js', opts).resolves({ + failures: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(runner.uncaught, 'to have a call satisfying', [err]); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + opts + } + ); + }); + }); + describe('when not provided a max job count', function() { it('should use a max job count based on CPU cores', function(done) { runner.run( @@ -113,7 +188,7 @@ describe('buffered-runner', function() { }); }); - describe('when provided bail flag', function() { + describe('when provided global bail flag', function() { describe('when no event contains an error', function() { it('should not force-terminate', function(done) { run.resolves({ @@ -141,32 +216,135 @@ describe('buffered-runner', function() { }).and('was called once'); done(); }, - {files: ['some-file.js'], opts: {bail: true}} + { + files: ['some-file.js', 'some-other-file.js'], + opts: {bail: true} + } ); }); }); - describe('when an event contains an error', function() { - it('should force-terminate', function(done) { - const err = { - __type: 'Error', - message: 'oh no' - }; - Error.captureStackTrace(err); + describe('when an event contains an error and has positive failures', function() { + describe('when subsequent files have not yet been run', function() { + it('should cleanly terminate the thread pool', function(done) { + const opts = {bail: true}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', opts).resolves({ + failures: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + 
data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + run.withArgs('some-other-file.js', opts).rejects(); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: [true]}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + opts + } + ); + }); + }); + describe('when subsequent files already started running', function() { + it('should cleanly terminate the thread pool', function(done) { + const opts = {bail: true}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', opts).resolves({ + failures: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + run.withArgs('some-other-file.js', opts).resolves({ + failures: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: [true]}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + opts + } + ); + }); + }); + }); + }); + + describe('when a suite has a bail flag', function() { + describe('when no event contains an error', function() { + it('should not force-terminate', function(done) { run.resolves({ - failures: 1, + failures: 0, events: [ { - eventName: EVENT_TEST_FAIL, + eventName: EVENT_TEST_PASS, data: { title: 'some test' - }, - error: err + } }, { eventName: EVENT_SUITE_END, data: { - title: 'some suite' + title: 'some suite', + _bail: true } } ] @@ -174,16 +352,166 @@ describe('buffered-runner', function() { runner.run( () => { - expect(terminate, 'to have calls satisfying', [ - {args: [true]}, - {args: []} - ]); + expect(terminate, 'to have a call satisfying', { + args: [] + }).and('was called once'); done(); }, - {files: ['some-file.js'], opts: {bail: true}} + { + files: ['some-file.js', 'some-other-file.js'], + opts: {} + } ); }); }); + describe('when an event contains an error and has positive failures', function() { + describe('when subsequent files have not yet been run', function() { + it('should cleanly terminate the thread pool', function(done) { + const opts = {bail: true}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', opts).resolves({ + failures: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + run.withArgs('some-other-file.js', opts).rejects(); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: [true]}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + opts + } + ); + }); + }); + + describe('when subsequent files already started running', function() { + it('should cleanly terminate the thread pool', function(done) { + 
const opts = {}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', opts).resolves({ + failures: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + } + } + ] + }); + run.withArgs('some-other-file.js', opts).resolves({ + failures: 0, + events: [ + { + eventName: EVENT_TEST_PASS, + data: { + title: 'some test' + } + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite' + } + } + ] + }); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: [true]}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + opts + } + ); + }); + }); + + describe('when subsequent files have not yet been run', function() { + it('should cleanly terminate the thread pool', function(done) { + const opts = {}; + const err = { + __type: 'Error', + message: 'oh no' + }; + run.withArgs('some-file.js', opts).resolves({ + failures: 1, + events: [ + { + eventName: EVENT_TEST_FAIL, + data: { + title: 'some test' + }, + error: err + }, + { + eventName: EVENT_SUITE_END, + data: { + title: 'some suite', + _bail: true + } + } + ] + }); + run.withArgs('some-other-file.js', opts).rejects(); + + runner.run( + () => { + expect(terminate, 'to have calls satisfying', [ + {args: [true]}, // this is the pool force-terminating + {args: []} // this will always be called, and will do nothing due to the previous call + ]).and('was called twice'); + done(); + }, + { + files: ['some-file.js', 'some-other-file.js'], + opts + } + ); + }); + }); + }); }); }); }); diff --git a/test/node-unit/serializer.spec.js b/test/node-unit/serializer.spec.js index 56c8dfdad5..39f6f6fb65 100644 --- a/test/node-unit/serializer.spec.js +++ b/test/node-unit/serializer.spec.js @@ -530,23 +530,11 @@ describe('serializer', function() { // the following two tests should be combined into one, but not sure how to express // as a single assertion - it('should add a `__type` prop', function() { + it('should add a readonly `__type` prop', function() { expect( new SerializableWorkerResult(), - 'to have property', - '__type', - 'SerializableWorkerResult' - ); - }); - - it('should not allow writing to the `__type` prop', function() { - const result = new SerializableWorkerResult(); - expect( - () => { - result.__type = 'a bird'; - }, - 'to throw', - TypeError + 'to have readonly property', + '__type' ); }); }); diff --git a/test/unit/runner.spec.js b/test/unit/runner.spec.js index 79edfc47a1..16c7de3642 100644 --- a/test/unit/runner.spec.js +++ b/test/unit/runner.spec.js @@ -416,10 +416,14 @@ describe('Runner', function() { hook.parent = suite; var err = new Error('error'); suite.bail(false); - runner.on(EVENT_RUN_END, function() { - throw new Error('"end" was emit, but the bail is false'); - }); - runner.failHook(hook, err); + expect( + function() { + runner.failHook(hook, err); + }, + 'not to emit from', + hook, + EVENT_RUN_END + ); done(); }); }); From 5092f08521a36f16993eacb8d1a7a216654412cc Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 25 Mar 2020 17:38:20 -0700 Subject: [PATCH 14/92] add test for Buffered reporter --- .eslintrc.yml | 1 + lib/reporters/buffered.js | 73 ++++++---- test/reporters/buffered.spec.js | 232 ++++++++++++++++++++++++++++++++ 3 files changed, 281 
insertions(+), 25 deletions(-) create mode 100644 test/reporters/buffered.spec.js diff --git a/.eslintrc.yml b/.eslintrc.yml index a90505535e..4310cee1bc 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -31,6 +31,7 @@ overrides: - 'lib/worker.js' - 'lib/reporters/buffered.js' - 'lib/serializer.js' + - 'test/reporters/buffered.spec.js' parserOptions: ecmaVersion: 2018 env: diff --git a/lib/reporters/buffered.js b/lib/reporters/buffered.js index 9be6d3df48..abdf6f69f6 100644 --- a/lib/reporters/buffered.js +++ b/lib/reporters/buffered.js @@ -26,9 +26,32 @@ const {SerializableEvent, SerializableWorkerResult} = require('../serializer'); const Base = require('./base'); /** - * The `Buffered` reporter is for use by parallel runs. Instead of outputting + * List of events to listen to; these will be buffered and sent + * when `Mocha#run` is complete (via {@link Buffered#done}). + */ +const EVENT_NAMES = [ + EVENT_SUITE_BEGIN, + EVENT_SUITE_END, + EVENT_TEST_BEGIN, + EVENT_TEST_PENDING, + EVENT_TEST_FAIL, + EVENT_TEST_PASS, + EVENT_TEST_RETRY, + EVENT_TEST_END, + EVENT_HOOK_BEGIN, + EVENT_HOOK_END +]; + +/** + * Like {@link EVENT_NAMES}, except we expect these events to only be emitted + * by the `Runner` once. + */ +const ONCE_EVENT_NAMES = [EVENT_DELAY_BEGIN, EVENT_DELAY_END]; + +/** + * The `Buffered` reporter is for use by concurrent runs. Instead of outputting * to `STDOUT`, etc., it retains a list of events it receives and hands these - * off to the callback passed into {@link Mocha#run}. That callback will then + * off to the callback passed into {@link Mocha#run}. That callback will then * return the data to the main process. */ class Buffered extends Base { @@ -46,37 +69,37 @@ class Buffered extends Base { */ const events = (this.events = []); + /** + * mapping of event names to listener functions we've created, + * so we can cleanly _remove_ them from the runner once it's completed. + */ const listeners = new Map(); - const createListener = evt => { - const listener = (runnable, err) => { - events.push(SerializableEvent.create(evt, runnable, err)); - }; - listeners.set(evt, listener); - return listener; - }; - [ - EVENT_DELAY_BEGIN, - EVENT_SUITE_BEGIN, - EVENT_SUITE_END, - EVENT_TEST_BEGIN, - EVENT_TEST_PENDING, - EVENT_TEST_FAIL, - EVENT_TEST_PASS, - EVENT_TEST_RETRY, - EVENT_TEST_END, - EVENT_DELAY_END, - EVENT_HOOK_BEGIN, - EVENT_HOOK_END - ].forEach(evt => { + /** + * Creates a listener for event `eventName` and adds it to the `listeners` + * map. This is a defensive measure, so that we don't a) leak memory or b) + * remove _other_ listeners that may not be associated with this reporter. 
+ * @param {string} eventName - Event name + */ + const createListener = eventName => + listeners + .set(eventName, (runnable, err) => { + events.push(SerializableEvent.create(eventName, runnable, err)); + }) + .get(eventName); + + EVENT_NAMES.forEach(evt => { runner.on(evt, createListener(evt)); }); + ONCE_EVENT_NAMES.forEach(evt => { + runner.once(evt, createListener(evt)); + }); runner.once(EVENT_RUN_END, () => { listeners.forEach((listener, evt) => { runner.removeListener(evt, listener); + listeners.delete(evt); }); - listeners.clear(); }); } @@ -88,7 +111,7 @@ class Buffered extends Base { */ done(failures, callback) { callback(SerializableWorkerResult.create(this.events, failures)); - this.events = []; + this.events = []; // defensive } } diff --git a/test/reporters/buffered.spec.js b/test/reporters/buffered.spec.js new file mode 100644 index 0000000000..790e0d63c8 --- /dev/null +++ b/test/reporters/buffered.spec.js @@ -0,0 +1,232 @@ +'use strict'; + +// this reporter does not actually output anything to the terminal, so we +// need to test it differently. + +const { + EVENT_SUITE_BEGIN, + EVENT_SUITE_END, + EVENT_TEST_FAIL, + EVENT_TEST_PASS, + EVENT_TEST_PENDING, + EVENT_TEST_BEGIN, + EVENT_TEST_END, + EVENT_TEST_RETRY, + EVENT_DELAY_BEGIN, + EVENT_DELAY_END, + EVENT_HOOK_BEGIN, + EVENT_HOOK_END, + EVENT_RUN_END +} = require('../../lib/runner').constants; +const {EventEmitter} = require('events'); +const {createSandbox} = require('sinon'); +const rewiremock = require('rewiremock/node'); + +describe('Buffered', function() { + let sandbox; + let runner; + let Buffered; + + beforeEach(function() { + sandbox = createSandbox(); + runner = new EventEmitter(); + Buffered = rewiremock.proxy( + require.resolve('../../lib/reporters/buffered.js'), + { + '../../lib/serializer': { + SerializableEvent: { + create: (eventName, runnable, err) => ({ + eventName, + data: runnable, + error: err, + __type: 'MockSerializableEvent' + }) + }, + SerializableWorkerResult: { + create: (events, failures) => ({ + events, + failures, + __type: 'MockSerializableWorkerResult' + }) + } + }, + '../../lib/reporters/base': class MockBase {} + } + ); + rewiremock.enable(); + }); + + afterEach(function() { + sandbox.restore(); + rewiremock.disable(); + }); + + describe('constructor', function() { + it('should listen for Runner events', function() { + // EventEmitter#once calls thru to EventEmitter#on, which + // befouls our assertion below. 
+ sandbox.stub(runner, 'once'); + sandbox.stub(runner, 'on'); + // eslint-disable-next-line no-new + new Buffered(runner); + expect(runner.on, 'to have calls satisfying', [ + // via Buffered + [EVENT_SUITE_BEGIN, expect.it('to be a function')], + [EVENT_SUITE_END, expect.it('to be a function')], + [EVENT_TEST_BEGIN, expect.it('to be a function')], + [EVENT_TEST_PENDING, expect.it('to be a function')], + [EVENT_TEST_FAIL, expect.it('to be a function')], + [EVENT_TEST_PASS, expect.it('to be a function')], + [EVENT_TEST_RETRY, expect.it('to be a function')], + [EVENT_TEST_END, expect.it('to be a function')], + [EVENT_HOOK_BEGIN, expect.it('to be a function')], + [EVENT_HOOK_END, expect.it('to be a function')] + ]); + }); + + it('should listen for Runner events expecting to occur once', function() { + sandbox.stub(runner, 'once'); + // eslint-disable-next-line no-new + new Buffered(runner); + expect(runner.once, 'to have calls satisfying', [ + [EVENT_DELAY_BEGIN, expect.it('to be a function')], + [EVENT_DELAY_END, expect.it('to be a function')], + [EVENT_RUN_END, expect.it('to be a function')] + ]); + }); + }); + + describe('event', function() { + let reporter; + + beforeEach(function() { + reporter = new Buffered(runner); + }); + + describe('on EVENT_RUN_END', function() { + it('should remove all listeners', function() { + runner.emit(EVENT_RUN_END); + expect(runner.listeners(), 'to be empty'); + }); + }); + + describe('on any other event listened for', function() { + it('should populate its `events` array with SerializableEvents', function() { + const suite = { + title: 'some suite' + }; + const test = { + title: 'some test' + }; + runner.emit(EVENT_SUITE_BEGIN, suite); + runner.emit(EVENT_TEST_BEGIN, test); + runner.emit(EVENT_TEST_PASS, test); + runner.emit(EVENT_TEST_END, test); + runner.emit(EVENT_SUITE_END, suite); + expect(reporter.events, 'to equal', [ + { + eventName: EVENT_SUITE_BEGIN, + data: suite, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_BEGIN, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_PASS, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_END, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_SUITE_END, + data: suite, + __type: 'MockSerializableEvent' + } + ]); + }); + }); + }); + + describe('instance method', function() { + let reporter; + + beforeEach(function() { + reporter = new Buffered(runner); + }); + + describe('done', function() { + it('should execute its callback with a SerializableWorkerResult', function() { + const suite = { + title: 'some suite' + }; + const test = { + title: 'some test' + }; + runner.emit(EVENT_SUITE_BEGIN, suite); + runner.emit(EVENT_TEST_BEGIN, test); + runner.emit(EVENT_TEST_PASS, test); + runner.emit(EVENT_TEST_END, test); + runner.emit(EVENT_SUITE_END, suite); + const cb = sandbox.stub(); + reporter.done(0, cb); + expect(cb, 'to have a call satisfying', [ + { + events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: suite, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_BEGIN, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_PASS, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_TEST_END, + data: test, + __type: 'MockSerializableEvent' + }, + { + eventName: EVENT_SUITE_END, + data: suite, + __type: 'MockSerializableEvent' + } + ], + failures: 0, + __type: 'MockSerializableWorkerResult' + } + ]); + }); + + it('should reset its `events` prop', 
function() { + const suite = { + title: 'some suite' + }; + const test = { + title: 'some test' + }; + runner.emit(EVENT_SUITE_BEGIN, suite); + runner.emit(EVENT_TEST_BEGIN, test); + runner.emit(EVENT_TEST_PASS, test); + runner.emit(EVENT_TEST_END, test); + runner.emit(EVENT_SUITE_END, suite); + const cb = sandbox.stub(); + reporter.done(0, cb); + expect(reporter.events, 'to be empty'); + }); + }); + }); +}); From da7e206b60a563f092642f33997f6f64736aa17c Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 14:41:33 -0700 Subject: [PATCH 15/92] remove useless calls to rewiremock --- test/node-unit/buffered-runner.spec.js | 5 ----- test/node-unit/cli/config.spec.js | 5 ----- test/node-unit/cli/options.spec.js | 2 -- test/reporters/buffered.spec.js | 2 -- 4 files changed, 14 deletions(-) diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index 3702a431bc..d35573000d 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -23,7 +23,6 @@ describe('buffered-runner', function() { beforeEach(function() { sandbox = createSandbox(); - rewiremock.enable(); suite = new Suite('a root suite', {}, true); @@ -515,9 +514,5 @@ describe('buffered-runner', function() { }); }); }); - - afterEach(function() { - rewiremock.disable(); - }); }); }); diff --git a/test/node-unit/cli/config.spec.js b/test/node-unit/cli/config.spec.js index 2823cdcd24..64dc32676c 100644 --- a/test/node-unit/cli/config.spec.js +++ b/test/node-unit/cli/config.spec.js @@ -108,7 +108,6 @@ describe('cli/config', function() { beforeEach(function() { findup = {sync: sandbox.stub().returns('/some/path/.mocharc.js')}; - rewiremock.enable(); findConfig = rewiremock.proxy( require.resolve('../../../lib/cli/config'), r => ({ @@ -117,10 +116,6 @@ describe('cli/config', function() { ).findConfig; }); - afterEach(function() { - rewiremock.disable(); - }); - it('should look for one of the config files using findup-sync', function() { findConfig(); expect(findup, 'to have a call satisfying', { diff --git a/test/node-unit/cli/options.spec.js b/test/node-unit/cli/options.spec.js index 56400971ac..085ba5fc71 100644 --- a/test/node-unit/cli/options.spec.js +++ b/test/node-unit/cli/options.spec.js @@ -40,12 +40,10 @@ describe('options', function() { beforeEach(function() { sandbox = createSandbox(); - rewiremock.enable(); }); afterEach(function() { sandbox.restore(); - rewiremock.disable(); }); /** diff --git a/test/reporters/buffered.spec.js b/test/reporters/buffered.spec.js index 790e0d63c8..981b939fd8 100644 --- a/test/reporters/buffered.spec.js +++ b/test/reporters/buffered.spec.js @@ -53,12 +53,10 @@ describe('Buffered', function() { '../../lib/reporters/base': class MockBase {} } ); - rewiremock.enable(); }); afterEach(function() { sandbox.restore(); - rewiremock.disable(); }); describe('constructor', function() { From ce015ee20af93b6b6e1ecc90e1984433d5a89c8c Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 14:41:59 -0700 Subject: [PATCH 16/92] ignoring some missed coverage --- lib/buffered-runner.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 1c5fb6e82e..4fd7f64c1b 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -115,6 +115,7 @@ class BufferedRunner extends Runner { callback(exitCode); } catch (err) { // this is an "unknown" error; probably from 3p code + /* istanbul ignore next */ process.nextTick(() => { throw err; }); From 
e3a38a9f41298b1fe52a08ece04d225d7a52fc10 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 14:43:44 -0700 Subject: [PATCH 17/92] add unit-ish tests for worker - refactoring for `lib/worker.js` - fixed some docstrings --- lib/worker.js | 116 +++++++++++++-------- test/node-unit/worker.spec.js | 186 ++++++++++++++++++++++++++++++++++ 2 files changed, 262 insertions(+), 40 deletions(-) create mode 100644 test/node-unit/worker.spec.js diff --git a/lib/worker.js b/lib/worker.js index 60072a68fc..f32aef648e 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -1,63 +1,99 @@ 'use strict'; +const {createInvalidArgumentTypeError} = require('./errors'); const workerpool = require('workerpool'); const Mocha = require('./mocha'); const {handleRequires, validatePlugin} = require('./cli/run-helpers'); -const debug = require('debug')('mocha:worker'); +const debug = require('debug')(`mocha:worker:${process.pid}`); const {serialize} = require('./serializer'); -let bootstrapped = false; + +const BUFFERED_REPORTER_PATH = require.resolve('./reporters/buffered'); + +if (workerpool.isMainThread) { + throw new Error( + 'This script is intended to be run as a worker (by the `workerpool` package).' + ); +} + +/** + * Initializes some stuff on the first call to {@link run}. + * + * Handles `--require` and `--ui`. Does _not_ handle `--reporter`, + * as only the `Buffered` reporter is used. + * + * **This function only runs once**; it overwrites itself with a no-op + * before returning. + * + * @param {Options} argv - Command-line options + */ +let bootstrap = argv => { + handleRequires(argv.require); + validatePlugin(argv, 'ui', Mocha.interfaces); + process.on('beforeExit', () => { + /* istanbul ignore next */ + debug('exiting'); + }); + debug('bootstrapped'); + bootstrap = () => {}; +}; /** * Runs a single test file in a worker thread. - * @param {string} file - Filepath of test file - * @param {Options} argv - Parsed command-line options object - * @returns {Promise<[number, BufferedEvent[]]>} A tuple of failures and - * serializable event data + * @param {string} filepath - Filepath of test file + * @param {Options} [argv] - Parsed command-line options object + * @returns {Promise<{failures: number, events: BufferedEvent[]}>} - Test + * failure count and list of events. */ -async function run(file, argv) { - debug('running test file %s on process [%d]', file, process.pid); - // the buffered reporter retains its events; these events are returned - // from this function back to the main process. - argv.reporter = require.resolve('./reporters/buffered'); - // if these were set, it would cause infinite recursion by spawning another worker - delete argv.parallel; - delete argv.jobs; - if (!bootstrapped) { - // setup requires and ui, but only do this once--we will reuse this worker! - handleRequires(argv.require); - validatePlugin(argv, 'ui', Mocha.interfaces); - bootstrapped = true; - debug('bootstrapped process [%d]', process.pid); +async function run(filepath, argv = {ui: 'bdd'}) { + if (!filepath) { + throw createInvalidArgumentTypeError( + 'Expected a non-empty "filepath" argument', + 'file', + 'string' + ); } - const mocha = new Mocha(argv); - mocha.files = [file]; + + debug('running test file %s', filepath); + + const opts = Object.assign(argv, { + // workers only use the `Buffered` reporter. + reporter: BUFFERED_REPORTER_PATH, + // if this was true, it would cause infinite recursion. 
+ parallel: false + }); + + bootstrap(opts); + + const mocha = new Mocha(opts).addFile(filepath); + try { await mocha.loadFilesAsync(); } catch (err) { - debug( - 'process [%d] rejecting; could not load file %s: %s', - process.pid, - file, - err - ); + debug('could not load file %s: %s', filepath, err); throw err; } - return new Promise(resolve => { - // TODO: figure out exactly what the sad path looks like here. - // will depend on allowUncaught - // rejection should only happen if an error is "unrecoverable" + + return new Promise((resolve, reject) => { mocha.run(result => { + // Runner adds these; if we don't remove them, we'll get a leak. process.removeAllListeners('uncaughtException'); - debug('process [%d] resolving', process.pid); - resolve(serialize(result)); + + debug('completed run with %d test failures', result.failures); + try { + resolve(serialize(result)); + } catch (err) { + // TODO: figure out exactly what the sad path looks like here. + // rejection should only happen if an error is "unrecoverable" + reject(err); + } }); }); } -workerpool.worker({ - run -}); +// this registers the `run` function. +workerpool.worker({run}); + +debug('running'); -process.on('beforeExit', () => { - debug('process [%d] exiting', process.pid); -}); +// for testing +exports.run = run; diff --git a/test/node-unit/worker.spec.js b/test/node-unit/worker.spec.js new file mode 100644 index 0000000000..4b62a1f9b6 --- /dev/null +++ b/test/node-unit/worker.spec.js @@ -0,0 +1,186 @@ +'use strict'; + +const {SerializableWorkerResult} = require('../../lib/serializer'); +const rewiremock = require('rewiremock/node'); +const {createSandbox} = require('sinon'); + +const WORKER_PATH = require.resolve('../../lib/worker.js'); + +describe('worker', function() { + let worker; + let workerpoolWorker; + let sandbox; + + beforeEach(function() { + sandbox = createSandbox(); + workerpoolWorker = sandbox.stub(); + sandbox.spy(process, 'removeAllListeners'); + }); + + describe('when run as main "thread"', function() { + it('should throw', function() { + expect(() => { + rewiremock.proxy(WORKER_PATH, { + workerpool: { + isMainThread: true, + worker: workerpoolWorker + } + }); + }, 'to throw'); + }); + }); + + describe('when run as "worker thread"', function() { + class MockMocha {} + let serializer; + let runHelpers; + + beforeEach(function() { + MockMocha.prototype.addFile = sandbox.stub().returnsThis(); + MockMocha.prototype.loadFilesAsync = sandbox.stub(); + MockMocha.prototype.run = sandbox.stub(); + MockMocha.interfaces = { + bdd: sandbox.stub() + }; + + serializer = { + serialize: sandbox.stub() + }; + + runHelpers = { + handleRequires: sandbox.stub(), + validatePlugin: sandbox.stub() + }; + + worker = rewiremock.proxy(WORKER_PATH, { + workerpool: { + isMainThread: false, + worker: workerpoolWorker + }, + '../../lib/mocha': MockMocha, + '../../lib/serializer': serializer, + '../../lib/cli/run-helpers': runHelpers + }); + }); + + it('should register itself with workerpool', function() { + expect(workerpoolWorker, 'to have a call satisfying', [ + {run: worker.run} + ]); + }); + + describe('function', function() { + describe('run', function() { + describe('when called without arguments', function() { + it('should reject', async function() { + return expect(worker.run, 'to be rejected with error satisfying', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + }); + + describe('when called with empty "filepath" argument', function() { + it('should reject', async function() { + return expect( + () => worker.run(''), + 
'to be rejected with error satisfying', + { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + } + ); + }); + }); + + describe('when the file at "filepath" argument is unloadable', function() { + it('should reject', async function() { + MockMocha.prototype.loadFilesAsync.rejects(); + return expect( + () => worker.run('some-non-existent-file.js'), + 'to be rejected' + ); + }); + }); + + describe('when the file at "filepath" is loadable', function() { + let result; + beforeEach(function() { + result = SerializableWorkerResult.create(); + + MockMocha.prototype.loadFilesAsync.resolves(); + MockMocha.prototype.run.yields(result); + }); + + it('should handle "--require"', async function() { + await worker.run('some-file.js', {require: 'foo'}); + expect(runHelpers.handleRequires, 'to have a call satisfying', [ + 'foo' + ]).and('was called once'); + }); + + it('should handle "--ui"', async function() { + const argv = {}; + await worker.run('some-file.js', argv); + + expect(runHelpers.validatePlugin, 'to have a call satisfying', [ + argv, + 'ui', + MockMocha.interfaces + ]).and('was called once'); + }); + + it('should call Mocha#run', async function() { + await worker.run('some-file.js'); + expect(MockMocha.prototype.run, 'was called once'); + }); + + it('should remove all uncaughtException listeners', async function() { + await worker.run('some-file.js'); + expect(process.removeAllListeners, 'to have a call satisfying', [ + 'uncaughtException' + ]); + }); + + describe('when serialization succeeds', function() { + beforeEach(function() { + serializer.serialize.returnsArg(0); + }); + + it('should resolve with a SerializedWorkerResult', async function() { + return expect( + worker.run('some-file.js'), + 'to be fulfilled with', + result + ); + }); + }); + + describe('when serialization fails', function() { + beforeEach(function() { + serializer.serialize.throws(); + }); + + it('should reject', async function() { + return expect(worker.run('some-file.js'), 'to be rejected'); + }); + }); + + describe('when run twice', function() { + it('should initialize only once', async function() { + await worker.run('some-file.js'); + await worker.run('some-other-file.js'); + + expect(runHelpers, 'to satisfy', { + handleRequires: expect.it('was called once'), + validatePlugin: expect.it('was called once') + }); + }); + }); + }); + }); + }); + }); + + afterEach(function() { + sandbox.restore(); + }); +}); From 947a00c13c70facd44e0b58e102ab041cce209f5 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:09:25 -0700 Subject: [PATCH 18/92] add some assertions for RawRunResult to match JSONRunResult you can now use "to have passed test count", "to have failed test count" and "to have pending test count" with a `RawRunResult` (the result of calling `helpers.runMocha()`) Signed-off-by: Christopher Hiller --- test/assertions.js | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/test/assertions.js b/test/assertions.js index 7453392059..9ce97e75d9 100644 --- a/test/assertions.js +++ b/test/assertions.js @@ -118,6 +118,24 @@ exports.mixinMochaAssertions = function(expect) { }); } ) + .addAssertion( + ' [not] to have failed [test] count ', + function(expect, result, count) { + expect(result.failing, '[not] to be', count); + } + ) + .addAssertion( + ' [not] to have passed [test] count ', + function(expect, result, count) { + expect(result.passing, '[not] to be', count); + } + ) + .addAssertion( + ' [not] to have pending [test] count ', + function(expect, result, count) { + expect(result.pending, 
'[not] to be', count); + } + ) .addAssertion(' [not] to have test count ', function( expect, result, From b161fc01cd2b3072c37541d994f828e73b10b07b Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:26:43 -0700 Subject: [PATCH 19/92] add assertion to check exit code of a RawResult --- test/assertions.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/assertions.js b/test/assertions.js index 9ce97e75d9..ef678ff4ea 100644 --- a/test/assertions.js +++ b/test/assertions.js @@ -333,7 +333,7 @@ exports.mixinMochaAssertions = function(expect) { } ) .addAssertion( - ' to have [exit] code ', + ' to have [exit] code ', function(expect, result, code) { expect(result.code, 'to be', code); } From 45233d29083b83d6008dc422d83fd4b0fbf3fe03 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:27:20 -0700 Subject: [PATCH 20/92] add a debug stmt to Buffered reporter --- lib/reporters/buffered.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/reporters/buffered.js b/lib/reporters/buffered.js index abdf6f69f6..1bbca61d36 100644 --- a/lib/reporters/buffered.js +++ b/lib/reporters/buffered.js @@ -22,7 +22,7 @@ const { EVENT_RUN_END } = require('../runner').constants; const {SerializableEvent, SerializableWorkerResult} = require('../serializer'); -// const debug = require('debug')('mocha:reporters:buffered'); +const debug = require('debug')('mocha:reporters:buffered'); const Base = require('./base'); /** @@ -96,6 +96,7 @@ class Buffered extends Base { }); runner.once(EVENT_RUN_END, () => { + debug('received EVENT_RUN_END'); listeners.forEach((listener, evt) => { runner.removeListener(evt, listener); listeners.delete(evt); From 8aaf72f078ca680ca72f87a73d56c5f5c6c9e2dc Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:28:38 -0700 Subject: [PATCH 21/92] fix issue with serialization of cyclic objects this was discovered when testing `--parallel` with retries. 
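the failing shape is, roughly, the usual parent/child cycle (an illustrative
sketch only, not the exact objects Mocha builds):

    const suite = {title: 'retry suite', tests: []};
    const test = {title: 'should retry', parent: suite};
    suite.tests.push(test); // suite -> test -> suite -> test ...

without tracking already-seen objects, `SerializableEvent#serialize()` keeps
re-queueing the same parent/key pairs and never terminates.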
also add some debug statements --- lib/serializer.js | 31 ++++++++++++++++++++----------- test/node-unit/serializer.spec.js | 15 ++++++++++++++- 2 files changed, 34 insertions(+), 12 deletions(-) diff --git a/lib/serializer.js b/lib/serializer.js index d19cff32d0..fb8c4abde0 100644 --- a/lib/serializer.js +++ b/lib/serializer.js @@ -2,7 +2,7 @@ const {type} = require('./utils'); const {createInvalidArgumentTypeError} = require('./errors'); -// const debug = require('debug')('mocha:serializer'); +const debug = require('debug')('mocha:serializer'); const SERIALIZABLE_RESULT_NAME = 'SerializableWorkerResult'; const SERIALIZABLE_TYPES = new Set(['object', 'array', 'function', 'error']); @@ -190,12 +190,16 @@ class SerializableEvent { parent[key] = value; // falls through case 'object': - // by adding props to the `pairs` array, we will process it further - pairs.push( - ...Object.keys(value) - .filter(key => SERIALIZABLE_TYPES.has(type(value[key]))) - .map(key => [value, key]) - ); + if (type(value.serialize) === 'function') { + parent[key] = value.serialize(); + } else { + // by adding props to the `pairs` array, we will process it further + pairs.push( + ...Object.keys(value) + .filter(key => SERIALIZABLE_TYPES.has(type(value[key]))) + .map(key => [value, key]) + ); + } break; case 'function': // we _may_ want to dig in to functions for some assertion libraries @@ -334,9 +338,12 @@ class SerializableEvent { * @param {*} [value] - A value to serialize */ exports.serialize = function serialize(value) { - return type(value) === 'object' && type(value.serialize) === 'function' - ? value.serialize() - : value; + const result = + type(value) === 'object' && type(value.serialize) === 'function' + ? value.serialize() + : value; + debug('serialized: %O', result); + return result; }; /** @@ -348,9 +355,11 @@ exports.serialize = function serialize(value) { * @param {*} [value] - A "message" to deserialize */ exports.deserialize = function deserialize(value) { - return SerializableWorkerResult.isSerializedWorkerResult(value) + const result = SerializableWorkerResult.isSerializedWorkerResult(value) ? 
SerializableWorkerResult.deserialize(value) : value; + debug('deserialized: %O', result); + return result; }; exports.SerializableEvent = SerializableEvent; diff --git a/test/node-unit/serializer.spec.js b/test/node-unit/serializer.spec.js index 39f6f6fb65..437f133a4e 100644 --- a/test/node-unit/serializer.spec.js +++ b/test/node-unit/serializer.spec.js @@ -140,8 +140,21 @@ describe('serializer', function() { }); }); + describe('when passed an object containing an object with a `serialize` method', function() { + it('should call the `serialize` method', function() { + const stub = sandbox.stub(); + const obj = { + nested: { + serialize: stub + } + }; + SerializableEvent.create('some-event', obj).serialize(); + expect(stub, 'was called once'); + }); + }); + describe('when passed an object containing a non-`serialize` method', function() { - it('should remove functions', function() { + it('should remove the method', function() { const obj = { func: () => {} }; From 9090d4113089fd47c44b865b398de6bd400851c0 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:28:59 -0700 Subject: [PATCH 22/92] add a couple debugs to worker --- lib/worker.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/worker.js b/lib/worker.js index f32aef648e..37bd45b7ae 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -80,10 +80,13 @@ async function run(filepath, argv = {ui: 'bdd'}) { debug('completed run with %d test failures', result.failures); try { - resolve(serialize(result)); + const serialized = serialize(result); + debug('returning to main process'); + resolve(serialized); } catch (err) { // TODO: figure out exactly what the sad path looks like here. // rejection should only happen if an error is "unrecoverable" + debug('rejecting: %O', err); reject(err); } }); From 552d92b72948dc844409cb244b59ec83d870580d Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:29:20 -0700 Subject: [PATCH 23/92] add a simple integration test for --jobs --- test/integration/options/jobs.spec.js | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 test/integration/options/jobs.spec.js diff --git a/test/integration/options/jobs.spec.js b/test/integration/options/jobs.spec.js new file mode 100644 index 0000000000..935c60c512 --- /dev/null +++ b/test/integration/options/jobs.spec.js @@ -0,0 +1,19 @@ +'use strict'; + +var path = require('path'); +var helpers = require('../helpers'); +var runMochaAsync = helpers.runMochaAsync; + +describe('--jobs', function() { + it('should not work without --parallel', function() { + return expect( + runMochaAsync( + path.join('options', 'parallel', '*.fixture.js'), + ['--jobs', '3'], + 'pipe' + ), + 'to be fulfilled with value satisfying', + {output: /Missing\s+dependent\s+arguments[^]\s+jobs\s+->\s+parallel/i} + ); + }); +}); From 269bcccee9263d7aa203040bc78dd85b63567ff0 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:30:14 -0700 Subject: [PATCH 24/92] refactor/reorganize some of the integration test helpers - adds `runMochaAsync` and `runMochaJSONAsync` for convenience --- test/integration/helpers.js | 276 ++++++++++++++++++++++-------------- 1 file changed, 173 insertions(+), 103 deletions(-) diff --git a/test/integration/helpers.js b/test/integration/helpers.js index b84048c5a3..24abda8d3a 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -11,108 +11,6 @@ var _MOCHA_EXECUTABLE = require.resolve('../../bin/_mocha'); module.exports = { DEFAULT_FIXTURE: 
DEFAULT_FIXTURE, - /** - * Invokes the mocha binary for the given fixture with color output disabled. - * Accepts an array of additional command line args to pass. The callback is - * invoked with a summary of the run, in addition to its output. The summary - * includes the number of passing, pending, and failing tests, as well as the - * exit code. Useful for testing different reporters. - * - * By default, `STDERR` is ignored. Pass `{stdio: 'pipe'}` as `opts` if you - * want it. - * Example response: - * { - * pending: 0, - * passing: 0, - * failing: 1, - * code: 1, - * output: '...' - * } - * - * @param {string} fixturePath - Path to fixture .js file - * @param {string[]} args - Extra args to mocha executable - * @param {Function} fn - Callback - * @param {Object} [opts] - Options for `spawn()` - */ - runMocha: function(fixturePath, args, fn, opts) { - if (typeof args === 'function') { - opts = fn; - fn = args; - args = []; - } - - var path; - - path = resolveFixturePath(fixturePath); - args = args || []; - - invokeSubMocha( - args.concat(path), - function(err, res) { - if (err) { - return fn(err); - } - - fn(null, getSummary(res)); - }, - opts - ); - }, - - /** - * Invokes the mocha binary for the given fixture using the JSON reporter, - * returning the parsed output, as well as exit code. - * - * By default, `STDERR` is ignored. Pass `{stdio: 'pipe'}` as `opts` if you - * want it. - * @param {string} fixturePath - Path from __dirname__ - * @param {string[]} args - Array of args - * @param {Function} fn - Callback - * @param {Object} [opts] - Opts for `spawn()` - * @returns {*} Parsed object - */ - runMochaJSON: function(fixturePath, args, fn, opts) { - if (typeof args === 'function') { - opts = fn; - fn = args; - args = []; - } - - var path; - - path = resolveFixturePath(fixturePath); - args = (args || []).concat('--reporter', 'json', path); - - return invokeMocha( - args, - function(err, res) { - if (err) { - return fn(err); - } - - var result; - try { - // attempt to catch a JSON parsing error *only* here. - // previously, the callback was called within this `try` block, - // which would result in errors thrown from the callback - // getting caught by the `catch` block below. - result = toJSONRunResult(res); - } catch (err) { - return fn( - new Error( - format( - 'Failed to parse JSON reporter output. Error:\n%O\nResult:\n%O', - err, - res - ) - ) - ); - } - fn(null, result); - }, - opts - ); - }, /** * regular expression used for splitting lines based on new line / dot symbol. @@ -159,9 +57,159 @@ module.exports = { */ escapeRegExp: function escapeRegExp(str) { return str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); // $& means the whole matched string - } + }, + + runMocha: runMocha, + runMochaJSON: runMochaJSON, + runMochaAsync: runMochaAsync, + runMochaJSONAsync: runMochaJSONAsync }; +/** + * Invokes the mocha binary for the given fixture with color output disabled. + * Accepts an array of additional command line args to pass. The callback is + * invoked with a summary of the run, in addition to its output. The summary + * includes the number of passing, pending, and failing tests, as well as the + * exit code. Useful for testing different reporters. + * + * By default, `STDERR` is ignored. Pass `{stdio: 'pipe'}` as `opts` if you + * want it. + * Example response: + * { + * pending: 0, + * passing: 0, + * failing: 1, + * code: 1, + * output: '...' 
+ * } + * + * @param {string} fixturePath - Path to fixture .js file + * @param {string[]} args - Extra args to mocha executable + * @param {Function} fn - Callback + * @param {Object} [opts] - Options for `spawn()` + */ +function runMocha(fixturePath, args, fn, opts) { + if (typeof args === 'function') { + opts = fn; + fn = args; + args = []; + } + + var path; + + path = resolveFixturePath(fixturePath); + args = args || []; + + return invokeSubMocha( + args.concat(path), + function(err, res) { + if (err) { + return fn(err); + } + + fn(null, getSummary(res)); + }, + opts + ); +} + +/** + * Invokes the mocha binary for the given fixture using the JSON reporter, + * returning the parsed output, as well as exit code. + * + * By default, `STDERR` is ignored. Pass `{stdio: 'pipe'}` as `opts` if you + * want it. + * @param {string} fixturePath - Path from __dirname__ + * @param {string[]} args - Array of args + * @param {Function} fn - Callback + * @param {Object} [opts] - Opts for `spawn()` + * @returns {*} Parsed object + */ +function runMochaJSON(fixturePath, args, fn, opts) { + if (typeof args === 'function') { + opts = fn; + fn = args; + args = []; + } + + var path; + + path = resolveFixturePath(fixturePath); + args = (args || []).concat('--reporter', 'json', path); + + return invokeMocha( + args, + function(err, res) { + if (err) { + return fn(err); + } + + var result; + try { + // attempt to catch a JSON parsing error *only* here. + // previously, the callback was called within this `try` block, + // which would result in errors thrown from the callback + // getting caught by the `catch` block below. + result = toJSONRunResult(res); + } catch (err) { + return fn( + new Error( + format( + 'Failed to parse JSON reporter output. Error:\n%O\nResult:\n%O', + err, + res + ) + ) + ); + } + fn(null, result); + }, + opts + ); +} + +/** + * Like {@link runMocha}, but returns a `Promise`. + * + * If you need more granular control, try {@link invokeMochaAsync} instead. + * + * @param {string} fixturePath - Path to (or name of, or basename of) fixture `.js` file + * @param {Options} [args] - Command-line arguments to the `mocha` executable + * @param {Object} [opts] - Options for `child_process.spawn`. + * @returns {Promise} + */ +function runMochaAsync(fixturePath, args, opts) { + return new Promise(function(resolve, reject) { + runMocha( + fixturePath, + args, + function(err, result) { + if (err) { + return reject(err); + } + resolve(result); + }, + opts + ); + }); +} + +function runMochaJSONAsync(fixturePath, args, opts) { + return new Promise(function(resolve, reject) { + runMochaJSON( + fixturePath, + args, + function(err, result) { + if (err) { + return reject(err); + } + resolve(result); + }, + opts + ); + }); +} + /** * Coerce output as returned by _spawnMochaWithListeners using JSON reporter into a JSONRunResult as * recognized by our custom unexpected assertions @@ -178,6 +226,15 @@ function toJSONRunResult(result) { /** * Creates arguments loading a default fixture if none provided * + * - The `--no-color` arg is always used (color output complicates testing `STDOUT`) + * - Unless `--bail` or `--no-bail` is set, use `--no-bail`. This enables using + * `--bail` (if desired) from the command-line when running our integration + * test suites without stepping on the toes of subprocesses. + * - Unless `--parallel` or `--no-parallel` is set, use `--no-parallel`. 
We + * assume the test suite is _already_ running in parallel--and there's no point + * in trying to run a single test fixture in parallel. + * - The {@link DEFAULT_FIXTURE} file is used if no arguments are provided. + * * @param {string[]|*} [args] - Arguments to `spawn` * @returns string[] */ @@ -317,6 +374,11 @@ function resolveFixturePath(fixture) { return path.join('test', 'integration', 'fixtures', fixture); } +/** + * Parses some `mocha` reporter output and returns a summary based on the "epilogue" + * @param {string} res - Typically output of STDOUT from the 'spec' reporter + * @returns {Summary} + */ function getSummary(res) { return ['passing', 'pending', 'failing'].reduce(function(summary, type) { var pattern, match; @@ -328,3 +390,11 @@ function getSummary(res) { return summary; }, res); } + +/** + * A summary of a `mocha` run + * @typedef {Object} Summary + * @property {number} passing - Number of passing tests + * @property {number} pending - Number of pending tests + * @property {number} failing - Number of failing tests + */ From 9a2a81650d02c6f5e12d0e65eaa92f0017031f99 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 26 Mar 2020 17:30:44 -0700 Subject: [PATCH 25/92] add tests for --parallel with --retries and --allow-uncaught --- .../options/parallel/retries-a.fixture.js | 5 ++ .../options/parallel/retries-b.fixture.js | 8 +++ .../{a.fixture.js => test-a.fixture.js} | 0 .../{b.fixture.js => test-b.fixture.js} | 0 .../{c.fixture.js => test-c.fixture.js} | 0 .../{d.fixture.js => test-d.fixture.js} | 0 .../options/parallel/uncaught.fixture.js | 7 ++ test/integration/options/parallel.spec.js | 64 ++++++++++++++----- 8 files changed, 68 insertions(+), 16 deletions(-) create mode 100644 test/integration/fixtures/options/parallel/retries-a.fixture.js create mode 100644 test/integration/fixtures/options/parallel/retries-b.fixture.js rename test/integration/fixtures/options/parallel/{a.fixture.js => test-a.fixture.js} (100%) rename test/integration/fixtures/options/parallel/{b.fixture.js => test-b.fixture.js} (100%) rename test/integration/fixtures/options/parallel/{c.fixture.js => test-c.fixture.js} (100%) rename test/integration/fixtures/options/parallel/{d.fixture.js => test-d.fixture.js} (100%) create mode 100644 test/integration/fixtures/options/parallel/uncaught.fixture.js diff --git a/test/integration/fixtures/options/parallel/retries-a.fixture.js b/test/integration/fixtures/options/parallel/retries-a.fixture.js new file mode 100644 index 0000000000..e5d37ca561 --- /dev/null +++ b/test/integration/fixtures/options/parallel/retries-a.fixture.js @@ -0,0 +1,5 @@ +describe('retry suite A', function() { + it('should pass', function() { + + }); +}); \ No newline at end of file diff --git a/test/integration/fixtures/options/parallel/retries-b.fixture.js b/test/integration/fixtures/options/parallel/retries-b.fixture.js new file mode 100644 index 0000000000..c1f1d3f7ba --- /dev/null +++ b/test/integration/fixtures/options/parallel/retries-b.fixture.js @@ -0,0 +1,8 @@ +describe('retry suite B', function() { + let count = 0; + it('should retry', function() { + this.retries(3); + console.log(`count: ${++count}`); + throw new Error('failure'); + }); +}); \ No newline at end of file diff --git a/test/integration/fixtures/options/parallel/a.fixture.js b/test/integration/fixtures/options/parallel/test-a.fixture.js similarity index 100% rename from test/integration/fixtures/options/parallel/a.fixture.js rename to test/integration/fixtures/options/parallel/test-a.fixture.js diff 
--git a/test/integration/fixtures/options/parallel/b.fixture.js b/test/integration/fixtures/options/parallel/test-b.fixture.js similarity index 100% rename from test/integration/fixtures/options/parallel/b.fixture.js rename to test/integration/fixtures/options/parallel/test-b.fixture.js diff --git a/test/integration/fixtures/options/parallel/c.fixture.js b/test/integration/fixtures/options/parallel/test-c.fixture.js similarity index 100% rename from test/integration/fixtures/options/parallel/c.fixture.js rename to test/integration/fixtures/options/parallel/test-c.fixture.js diff --git a/test/integration/fixtures/options/parallel/d.fixture.js b/test/integration/fixtures/options/parallel/test-d.fixture.js similarity index 100% rename from test/integration/fixtures/options/parallel/d.fixture.js rename to test/integration/fixtures/options/parallel/test-d.fixture.js diff --git a/test/integration/fixtures/options/parallel/uncaught.fixture.js b/test/integration/fixtures/options/parallel/uncaught.fixture.js new file mode 100644 index 0000000000..bca1610ab7 --- /dev/null +++ b/test/integration/fixtures/options/parallel/uncaught.fixture.js @@ -0,0 +1,7 @@ +'use strict'; + +it('throws an uncaught exception', function (done) { + process.nextTick(function () { + throw new Error('existential isolation!!'); + }); +}); diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 5f407486b6..a564ebe204 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -2,23 +2,55 @@ var path = require('path'); var helpers = require('../helpers'); -var runMochaJSON = helpers.runMochaJSON; +var runMochaAsync = helpers.runMochaAsync; +var invokeMochaAsync = helpers.invokeMochaAsync; describe('--parallel', function() { - it('should not appear fundamentally different than without', function(done) { - runMochaJSON( - path.join('options', 'parallel', '*.fixture.js'), - ['--parallel'], - function(err, res) { - if (err) { - return done(err); - } - expect(res, 'to have failed') - .and('to have passed test count', 2) - .and('to have pending test count', 1) - .and('to have failed test count', 2); - done(); - } - ); + it('should not appear fundamentally different than without', function() { + return expect( + runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ + '--parallel' + ]), + 'when fulfilled', + 'to have failed' + ) + .and('when fulfilled', 'to have passed test count', 2) + .and('when fulfilled', 'to have pending test count', 1) + .and('when fulfilled', 'to have failed test count', 2); + }); + + describe('when used with --retries', function() { + it('should retry tests appropriately', function() { + return expect( + runMochaAsync( + path.join('options', 'parallel', 'retries-*.fixture.js'), + ['--parallel'] + ), + 'when fulfilled', + 'to have failed' + ) + .and('when fulfilled', 'to have passed test count', 1) + .and('when fulfilled', 'to have pending test count', 0) + .and('when fulfilled', 'to have failed test count', 1) + .and('when fulfilled', 'to contain output', /count: 3/); + }); + }); + + describe('when used with --allow-uncaught', function() { + it('should bubble up an exception', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/uncaught.fixture.js'), + '--parallel', + '--allow-uncaught' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to contain output', + /Error: existential isolation/i + ).and('when fulfilled', 'to have exit code', 1); + }); }); }); 
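The promise-returning helpers introduced above keep specs like these free of
callback plumbing; a minimal usage sketch (the fixture glob and expected counts
below are hypothetical):

    var helpers = require('../helpers');

    it('should report two passing fixture tests', function() {
      return expect(
        helpers.runMochaAsync('my-feature/*.fixture.js', ['--no-parallel']),
        'when fulfilled',
        'to have passed test count',
        2
      ).and('when fulfilled', 'to have exit code', 0);
    });
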
From b4c7b7ff5a0066b8cdacbb836fc5f738e5856cb5 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 27 Mar 2020 14:06:31 -0700 Subject: [PATCH 26/92] increase default test timeout to 1s --- .mocharc.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.mocharc.yml b/.mocharc.yml index 4a05397bee..fa429404d5 100644 --- a/.mocharc.yml +++ b/.mocharc.yml @@ -4,6 +4,6 @@ global: - okGlobalA,okGlobalB - okGlobalC - callback* -timeout: 500 +timeout: 1000 parallel: true # bail: true From 2cb5768c23a33fc0397073dbc1a3077dc853b99d Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:21:49 -0700 Subject: [PATCH 27/92] fix some flaky tests when run in wallaby - `config.spec.js` was misusing `rewiremock` - `mocha.spec.js` was loading files it shouldn't --- test/node-unit/cli/config.spec.js | 19 ++++++++++++++++--- test/node-unit/mocha.spec.js | 29 ++++++++++++++--------------- 2 files changed, 30 insertions(+), 18 deletions(-) diff --git a/test/node-unit/cli/config.spec.js b/test/node-unit/cli/config.spec.js index 64dc32676c..a67586f565 100644 --- a/test/node-unit/cli/config.spec.js +++ b/test/node-unit/cli/config.spec.js @@ -1,6 +1,5 @@ 'use strict'; -const {loadConfig, parsers, CONFIG_FILES} = require('../../../lib/cli/config'); const {createSandbox} = require('sinon'); const rewiremock = require('rewiremock/node'); @@ -17,6 +16,17 @@ describe('cli/config', function() { }); describe('loadConfig()', function() { + let parsers; + let loadConfig; + + beforeEach(function() { + const config = rewiremock.proxy( + require.resolve('../../../lib/cli/config') + ); + parsers = config.parsers; + loadConfig = config.loadConfig; + }); + describe('when parsing succeeds', function() { beforeEach(function() { sandbox.stub(parsers, 'yaml').returns(config); @@ -105,15 +115,18 @@ describe('cli/config', function() { describe('findConfig()', function() { let findup; let findConfig; + let CONFIG_FILES; beforeEach(function() { findup = {sync: sandbox.stub().returns('/some/path/.mocharc.js')}; - findConfig = rewiremock.proxy( + const config = rewiremock.proxy( require.resolve('../../../lib/cli/config'), r => ({ 'find-up': r.by(() => findup) }) - ).findConfig; + ); + findConfig = config.findConfig; + CONFIG_FILES = config.CONFIG_FILES; }); it('should look for one of the config files using findup-sync', function() { diff --git a/test/node-unit/mocha.spec.js b/test/node-unit/mocha.spec.js index d665589dca..dd4f3b3e86 100644 --- a/test/node-unit/mocha.spec.js +++ b/test/node-unit/mocha.spec.js @@ -1,28 +1,24 @@ 'use strict'; -const path = require('path'); const Mocha = require('../../lib/mocha'); const utils = require('../../lib/utils'); describe('Mocha', function() { const opts = {reporter: utils.noop}; // no output - const testFiles = [ - __filename, - path.join(__dirname, 'cli', 'config.spec.js'), - path.join(__dirname, 'cli', 'run.spec.js') - ]; - const resolvedTestFiles = testFiles.map(require.resolve); describe('#addFile', function() { it('should add the given file to the files array', function() { const mocha = new Mocha(opts); - mocha.addFile(__filename); - expect(mocha.files, 'to have length', 1).and('to contain', __filename); + mocha.addFile('some-file.js'); + expect(mocha.files, 'to have length', 1).and( + 'to contain', + 'some-file.js' + ); }); it('should be chainable', function() { const mocha = new Mocha(opts); - expect(mocha.addFile(__filename), 'to be', mocha); + expect(mocha.addFile('some-file.js'), 'to be', mocha); }); }); @@ -31,9 +27,11 @@ describe('Mocha', 
function() { this.timeout(1000); const mocha = new Mocha(opts); - testFiles.forEach(mocha.addFile, mocha); + mocha.addFile(require.resolve('../../package.json')); mocha.loadFiles(); - expect(require.cache, 'to have keys', resolvedTestFiles); + expect(require.cache, 'to have keys', [ + require.resolve('../../package.json') + ]); }); it('should execute the optional callback if given', function() { @@ -58,11 +56,12 @@ describe('Mocha', function() { describe('#unloadFiles', function() { it('should unload all test files from cache', function() { const mocha = new Mocha(opts); - - testFiles.forEach(mocha.addFile, mocha); + mocha.addFile(require.resolve('../../package.json')); mocha.loadFiles(); mocha.unloadFiles(); - expect(require.cache, 'not to have keys', resolvedTestFiles); + expect(require.cache, 'not to have keys', [ + require.resolve('../../package.json') + ]); }); it('should be chainable', function() { From 4cfafdad1e4fcf48e0a664cd3700e2ad633cc2d3 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:22:11 -0700 Subject: [PATCH 28/92] integration test helper: add `command` prop to RawRunResult for easy copy-pasting --- test/integration/helpers.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/test/integration/helpers.js b/test/integration/helpers.js index 24abda8d3a..f8c99b55a3 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -239,14 +239,13 @@ function toJSONRunResult(result) { * @returns string[] */ function defaultArgs(args) { - var newArgs = (!args || !args.length - ? ['--file', DEFAULT_FIXTURE] - : args - ).concat(['--no-color']); - if (!newArgs.some(arg => /--bail/.test(arg))) { + var newArgs = (!args || !args.length ? [DEFAULT_FIXTURE] : args).concat([ + '--no-color' + ]); + if (!newArgs.some(arg => /--(no-)?bail/.test(arg))) { newArgs.push('--no-bail'); } - if (!newArgs.some(arg => /--parallel/.test(arg))) { + if (!newArgs.some(arg => /--(no-)?parallel/.test(arg))) { newArgs.push('--no-parallel'); } return newArgs; @@ -360,7 +359,8 @@ function _spawnMochaWithListeners(args, fn, opts) { fn(null, { output: output, code: code, - args: args + args: args, + command: args.join(' ') }); }); From 1b05f6fede8b2cecb5c4697adf15f5cee01cab68 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:22:25 -0700 Subject: [PATCH 29/92] tweak a couple debug statements --- lib/buffered-runner.js | 1 - lib/worker.js | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 4fd7f64c1b..c5587dbcb8 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -59,7 +59,6 @@ class BufferedRunner extends Runner { files.map(async file => { debug('enqueueing test file %s', file); try { - debug('calling run with: %O', [file, opts]); const result = await poolProxy.run(file, opts); debug(result); const {failures, events} = deserialize(result); diff --git a/lib/worker.js b/lib/worker.js index 37bd45b7ae..35c1c400cb 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -96,7 +96,7 @@ async function run(filepath, argv = {ui: 'bdd'}) { // this registers the `run` function. 
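// For orientation: the parent side (BufferedRunner) calls it through workerpool's
// process-pool proxy. A rough sketch of that flow -- the file path and options
// below are illustrative only:
//
//   const pool = workerpool.pool(require.resolve('./worker.js'), {
//     workerType: 'process',
//     maxWorkers: 4
//   });
//   const proxy = await pool.proxy();
//   const result = await proxy.run('test/some.spec.js', {ui: 'bdd'});
//   // `result` is the serialized run produced by lib/serializer.js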
workerpool.worker({run}); -debug('running'); +debug('started worker process'); // for testing exports.run = run; From c6e560d8a380216b9eade3e587e99a5590ca663e Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:23:44 -0700 Subject: [PATCH 30/92] serialization: add missing functions `Runnable#isPending()` and `Suite#isPending()` needed by reporters, as was `Test#state` --- lib/hook.js | 1 + lib/suite.js | 1 + lib/test.js | 2 ++ 3 files changed, 4 insertions(+) diff --git a/lib/hook.js b/lib/hook.js index aa031e283f..b009c90f37 100644 --- a/lib/hook.js +++ b/lib/hook.js @@ -47,6 +47,7 @@ Hook.prototype.error = function(err) { Hook.prototype.serialize = function serialize() { return { + $$isPending: this.isPending(), $$titlePath: this.titlePath(), ctx: { currentTest: { diff --git a/lib/suite.js b/lib/suite.js index 62030fd4b6..a8301cc809 100644 --- a/lib/suite.js +++ b/lib/suite.js @@ -558,6 +558,7 @@ Suite.prototype.serialize = function serialize() { return { _bail: this._bail, $$fullTitle: this.fullTitle(), + $$isPending: this.isPending(), root: this.root, title: this.title }; diff --git a/lib/test.js b/lib/test.js index 391f613e73..3957c26c50 100644 --- a/lib/test.js +++ b/lib/test.js @@ -72,6 +72,7 @@ Test.prototype.serialize = function serialize() { return { $$currentRetry: this._currentRetry, $$fullTitle: this.fullTitle(), + $$isPending: this.pending, $$retriedTest: this._retriedTest || null, $$slow: this._slow, $$titlePath: this.titlePath(), @@ -82,6 +83,7 @@ Test.prototype.serialize = function serialize() { $$fullTitle: this.parent.fullTitle() }, speed: this.speed, + state: this.state, title: this.title, type: this.type }; From 2c3b56422eb613677f55b517f775527ab16d9c88 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:29:29 -0700 Subject: [PATCH 31/92] --file and --parallel are now mutually exclusive --- lib/cli/run.js | 7 +++++++ test/integration/options/parallel.spec.js | 18 ++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/lib/cli/run.js b/lib/cli/run.js index 84e358b3fe..dd48a36d2e 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -285,6 +285,13 @@ exports.builder = yargs => ); } + // yargs.conflicts() can't deal with `--file foo.js --no-parallel`, either + if (argv.file && argv.parallel) { + throw createUnsupportedError( + '--parallel runs test files in a non-deterministic order, and is mutually exclusive with --file' + ); + } + if (argv.compilers) { throw createUnsupportedError( `--compilers is DEPRECATED and no longer supported. 
diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index a564ebe204..a52c602bea 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -53,4 +53,22 @@ describe('--parallel', function() { ).and('when fulfilled', 'to have exit code', 1); }); }); + + describe('when used with --file', function() { + it('should error out', function() { + return expect( + invokeMochaAsync( + [ + '--file', + path.join('options', 'parallel', 'test-a.fixture.js'), + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to contain output', + /mutually exclusive with --file/ + ); + }); + }); }); From 10cd2c4f5daeafad67afceb14f5d0318662f8da0 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:29:49 -0700 Subject: [PATCH 32/92] add --parallel test for --bail and for each reporter --- test/integration/options/parallel.spec.js | 62 ++++++++++++++++++++++- 1 file changed, 61 insertions(+), 1 deletion(-) diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index a52c602bea..02522533b1 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -1,5 +1,5 @@ 'use strict'; - +var Mocha = require('../../../lib/mocha'); var path = require('path'); var helpers = require('../helpers'); var runMochaAsync = helpers.runMochaAsync; @@ -71,4 +71,64 @@ describe('--parallel', function() { ); }); }); + + describe('when used with --bail', function() { + it('should skip some tests', function() { + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--parallel', '--bail'] + ).then(function(result) { + // we don't know _exactly_ how many tests will be skipped here + // due to the --bail, but the number of tests completed should be + // less than the total, which is 5. 
+ return expect( + result.passing + result.pending + result.failing, + 'to be less than', + 5 + ); + }); + }); + + it('should fail', function() { + return expect( + runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ + '--parallel', + '--bail' + ]), + 'when fulfilled', + 'to have failed' + ); + }); + }); + + // each reporter name is duplicated; one is in all lower-case + Object.keys(Mocha.reporters) + .filter(function(name) { + return /^[a-z]/.test(name); + }) + .forEach(function(reporter) { + describe('when used with --reporter=' + reporter, function() { + it('should have the same result as run with --no-parallel', function() { + this.timeout(5000); + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--reporter', reporter, '--no-parallel'] + ).then(function(expected) { + return expect( + runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--reporter', reporter, '--parallel'] + ), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); + }); + }); }); From 4cfe37cd37d8c19f02733de844a0d5c0586c1ee9 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:45:31 -0700 Subject: [PATCH 33/92] fix stack prop access on custom errors --- lib/serializer.js | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/lib/serializer.js b/lib/serializer.js index fb8c4abde0..3d87232f9e 100644 --- a/lib/serializer.js +++ b/lib/serializer.js @@ -175,20 +175,20 @@ class SerializableEvent { */ static _serialize(pairs, parent, key) { let value = parent[key]; + if (type(value) === 'error' || value instanceof Error) { + // we need to reference the stack prop b/c it's lazily-loaded. + // `__type` is necessary for deserialization to create an `Error` later. + // `message` is apparently not enumerable, so we must handle it specifically. + value = Object.assign(Object.create(null), value, { + stack: value.stack, + message: value.message, + __type: 'Error' + }); + parent[key] = value; + // after this, the result of type(value) will be `object`, and we'll throw + // whatever other junk is in the original error into the new `value`. + } switch (type(value)) { - case 'error': - // we need to reference the stack prop b/c it's lazily-loaded. - // `__type` is necessary for deserialization to create an `Error` later. - // fall through to the 'object' branch below to further process & remove - // any junk that an assertion lib may throw in there. - // `message` is apparently not enumerable, so we must handle it specifically. 
- value = Object.assign(Object.create(null), value, { - stack: value.stack, - message: value.message, - __type: 'Error' - }); - parent[key] = value; - // falls through case 'object': if (type(value.serialize) === 'function') { parent[key] = value.serialize(); From d5d6e898a29f59d15a47042638cb09ccd10e3603 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Sun, 29 Mar 2020 14:45:48 -0700 Subject: [PATCH 34/92] add --parallel test for ESM --- test/integration/options/parallel.spec.js | 56 ++++++++++++++++++----- 1 file changed, 45 insertions(+), 11 deletions(-) diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 02522533b1..06463821ba 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -6,17 +6,51 @@ var runMochaAsync = helpers.runMochaAsync; var invokeMochaAsync = helpers.invokeMochaAsync; describe('--parallel', function() { - it('should not appear fundamentally different than without', function() { - return expect( - runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ - '--parallel' - ]), - 'when fulfilled', - 'to have failed' - ) - .and('when fulfilled', 'to have passed test count', 2) - .and('when fulfilled', 'to have pending test count', 1) - .and('when fulfilled', 'to have failed test count', 2); + describe('when used with CJS tests', function() { + it('should have the same result as with --no-parallel', function() { + this.timeout(5000); + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--no-parallel'] + ).then(function(expected) { + return expect( + runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ + '--parallel' + ]), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); + }); + + describe('when used with ESM tests', function() { + it('should have the same result as with --no-parallel', function() { + this.timeout(5000); + return runMochaAsync( + path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs'), + ['--no-parallel'] + ).then(function(expected) { + return expect( + runMochaAsync( + path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs'), + ['--parallel'] + ), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); }); describe('when used with --retries', function() { From 291b40a5cdc820899bbb1da14c9c325ec3588bb2 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 30 Mar 2020 15:30:32 -0700 Subject: [PATCH 35/92] fix serialization of circular data structures - more useful debug info - renamed a couple debug namespaces; doing `DEBUG=mocha:parallel*` now provides good general-purpose info --- lib/buffered-runner.js | 25 ++++++++++++++++++++++--- lib/serializer.js | 13 +++++++++++-- lib/worker.js | 8 +++++++- 3 files changed, 40 insertions(+), 6 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index c5587dbcb8..9ce031c09c 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -3,11 +3,23 @@ const os = require('os'); const Runner = require('./runner'); const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; -const debug = require('debug')('mocha:buffered-runner'); +const debug = require('debug')('mocha:parallel:buffered-runner'); const workerpool = require('workerpool'); const {deserialize} = 
require('./serializer'); const {type} = require('./utils'); const WORKER_PATH = require.resolve('./worker.js'); +const {setInterval, clearInterval} = global; + +const debugStats = pool => { + const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats(); + debug( + '%d/%d busy workers; %d idle; %d remaining tasks', + busyWorkers, + totalWorkers, + idleWorkers, + pendingTasks + ); +}; /** * This `Runner` delegates tests runs to worker threads. Does not execute any @@ -40,10 +52,12 @@ class BufferedRunner extends Runner { // However, we want to make use of `async`/`await`, so we use this // IIFE. (async () => { + let t; + let pool; try { const jobs = opts.jobs || os.cpus().length - 1; debug('starting pool with %d max workers', jobs); - const pool = workerpool.pool(WORKER_PATH, { + pool = workerpool.pool(WORKER_PATH, { workerType: 'process', maxWorkers: jobs }); @@ -55,12 +69,13 @@ class BufferedRunner extends Runner { this.emit(EVENT_RUN_BEGIN); const poolProxy = await pool.proxy(); + + t = setInterval(() => debugStats(pool), 5000).unref(); await Promise.all( files.map(async file => { debug('enqueueing test file %s', file); try { const result = await poolProxy.run(file, opts); - debug(result); const {failures, events} = deserialize(result); debug( 'completed run of file %s; %d failures / %d events', @@ -92,6 +107,7 @@ class BufferedRunner extends Runner { didAbort = true; await pool.terminate(true); } + debug('all events reported for file %s', file); } catch (err) { if (!didAbort) { debug('terminating pool due to uncaught exception'); @@ -118,6 +134,9 @@ class BufferedRunner extends Runner { process.nextTick(() => { throw err; }); + } finally { + clearInterval(t); + debugStats(pool); } })(); } diff --git a/lib/serializer.js b/lib/serializer.js index 3d87232f9e..3d1c62b06c 100644 --- a/lib/serializer.js +++ b/lib/serializer.js @@ -172,9 +172,14 @@ class SerializableEvent { * @param {[object, string][]} pairs - List of parent/key tuples to process; modified in-place * @param {object} parent - Some parent object * @param {string} key - Key to inspect + * @param {WeakSet} seenObjects - For avoiding circular references */ - static _serialize(pairs, parent, key) { + static _serialize(pairs, parent, key, seenObjects) { let value = parent[key]; + if (seenObjects.has(value)) { + parent[key] = Object.create(null); + return; + } if (type(value) === 'error' || value instanceof Error) { // we need to reference the stack prop b/c it's lazily-loaded. // `__type` is necessary for deserialization to create an `Error` later. @@ -223,6 +228,8 @@ class SerializableEvent { * the result in `SerializableEvent#data`) and `SerializableEvent#error`. * Freezes this object. The result is an object that can be transmitted over * IPC. + * If this quickly becomes unmaintainable, we will want to move towards immutable + * objects post-haste. 
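   * The frozen result has, roughly, this shape (property names abridged; `error`
   * is typically only populated for failure-type events):
   *   `{eventName: 'fail', data: {$$fullTitle: '...'}, error: {message, stack, __type: 'Error'}}`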
*/ serialize() { // given a parent object and a key, inspect the value and decide whether @@ -239,10 +246,12 @@ class SerializableEvent { }); const pairs = Object.keys(result).map(key => [result, key]); + const seenObjects = new WeakSet(); let pair; while ((pair = pairs.shift())) { - SerializableEvent._serialize(pairs, ...pair); + SerializableEvent._serialize(pairs, ...pair, seenObjects); + seenObjects.add(pair[0]); } this.data = result.data; diff --git a/lib/worker.js b/lib/worker.js index 35c1c400cb..354ccb0594 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -4,8 +4,9 @@ const {createInvalidArgumentTypeError} = require('./errors'); const workerpool = require('workerpool'); const Mocha = require('./mocha'); const {handleRequires, validatePlugin} = require('./cli/run-helpers'); -const debug = require('debug')(`mocha:worker:${process.pid}`); +const debug = require('debug')(`mocha:parallel:worker:${process.pid}`); const {serialize} = require('./serializer'); +const {setInterval, clearInterval} = global; const BUFFERED_REPORTER_PATH = require.resolve('./reporters/buffered'); @@ -74,6 +75,9 @@ async function run(filepath, argv = {ui: 'bdd'}) { } return new Promise((resolve, reject) => { + const t = setInterval(() => { + debug('still running %s...', filepath); + }, 5000).unref(); mocha.run(result => { // Runner adds these; if we don't remove them, we'll get a leak. process.removeAllListeners('uncaughtException'); @@ -88,6 +92,8 @@ async function run(filepath, argv = {ui: 'bdd'}) { // rejection should only happen if an error is "unrecoverable" debug('rejecting: %O', err); reject(err); + } finally { + clearInterval(t); } }); }); From 42e466c2682e1b6e13b7a9ad8e11c254afeb20ad Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 30 Mar 2020 17:33:41 -0700 Subject: [PATCH 36/92] --parallel is mutually exclusive with --sort --- lib/cli/run.js | 22 ++++++++++++++++------ test/integration/options/parallel.spec.js | 18 ++++++++++++++++++ 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/lib/cli/run.js b/lib/cli/run.js index dd48a36d2e..5160d845e3 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -231,7 +231,8 @@ exports.builder = yargs => }, sort: { description: 'Sort test files', - group: GROUPS.FILES + group: GROUPS.FILES, + conflicts: 'parallel' }, timeout: { default: defaults.timeout, @@ -285,11 +286,20 @@ exports.builder = yargs => ); } - // yargs.conflicts() can't deal with `--file foo.js --no-parallel`, either - if (argv.file && argv.parallel) { - throw createUnsupportedError( - '--parallel runs test files in a non-deterministic order, and is mutually exclusive with --file' - ); + if (argv.parallel) { + // yargs.conflicts() can't deal with `--file foo.js --no-parallel`, either + if (argv.file) { + throw createUnsupportedError( + '--parallel runs test files in a non-deterministic order, and is mutually exclusive with --file' + ); + } + + // or this + if (argv.sort) { + throw createUnsupportedError( + '--parallel runs test files in a non-deterministic order, and is mutually exclusive with --sort' + ); + } } if (argv.compilers) { diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 06463821ba..584210aeb1 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -106,6 +106,24 @@ describe('--parallel', function() { }); }); + describe('when used with --sort', function() { + it('should error out', function() { + return expect( + invokeMochaAsync( + [ + '--sort', + 
path.join('options', 'parallel', 'test-*.fixture.js'), + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to contain output', + /mutually exclusive with --sort/ + ); + }); + }); + describe('when used with --bail', function() { it('should skip some tests', function() { return runMochaAsync( From c9c37795df6b29e1db0f253aadc607175fb681e2 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 30 Mar 2020 17:34:01 -0700 Subject: [PATCH 37/92] don't test --parallel w/ "base" reporter --- test/integration/options/parallel.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 584210aeb1..2946c5191d 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -156,7 +156,7 @@ describe('--parallel', function() { // each reporter name is duplicated; one is in all lower-case Object.keys(Mocha.reporters) .filter(function(name) { - return /^[a-z]/.test(name); + return name !== 'base' && /^[a-z]/.test(name); }) .forEach(function(reporter) { describe('when used with --reporter=' + reporter, function() { From fc912e820ee552e37e64105490df49f9583c4f6a Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 30 Mar 2020 17:36:35 -0700 Subject: [PATCH 38/92] remove unnecessary bail handling - add more realistic tests around bail usage - serializer: rename `failures` to `failureCount` to be more obvious - warn if a single file is executed with `--parallel` - update some comments / debug statements --- lib/buffered-runner.js | 91 ++++++++++++------- lib/serializer.js | 8 +- .../fixtures/options/parallel/bail.fixture.js | 9 ++ test/integration/options/parallel.spec.js | 22 +++++ test/node-unit/buffered-runner.spec.js | 72 +++++++++++---- 5 files changed, 147 insertions(+), 55 deletions(-) create mode 100644 test/integration/fixtures/options/parallel/bail.fixture.js diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 9ce031c09c..a31a513f63 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -6,10 +6,9 @@ const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; const debug = require('debug')('mocha:parallel:buffered-runner'); const workerpool = require('workerpool'); const {deserialize} = require('./serializer'); -const {type} = require('./utils'); +const {warn} = require('./utils'); const WORKER_PATH = require.resolve('./worker.js'); const {setInterval, clearInterval} = global; - const debugStats = pool => { const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats(); debug( @@ -28,12 +27,13 @@ const debugStats = pool => { class BufferedRunner extends Runner { /** * Runs Mocha tests by creating a thread pool, then delegating work to the - * worker threads. Each worker receives one file, and as workers become - * available, they take a file from the queue and run it. - * The worker thread execution is treated like an RPC--it returns a `Promise` - * containing serialized information about the run. The information is processed - * as it's received, and emitted to a {@link Reporter}, which is likely listening - * for these events. + * worker threads. + * + * Each worker receives one file, and as workers become available, they take a + * file from the queue and run it. The worker thread execution is treated like + * an RPC--it returns a `Promise` containing serialized information about the + * run. 
The information is processed as it's received, and emitted to a + * {@link Reporter}, which is likely listening for these events. * * @todo handle tests in a specific order, e.g., via `--file`? * @todo handle delayed runs? @@ -47,64 +47,91 @@ class BufferedRunner extends Runner { * @param {Options} option.opts - Command-line options */ run(callback, {files, opts}) { - // This function should _not_ return a `Promise`; its parent - // (`Runner#run`) is a void function, so this should be as well. - // However, we want to make use of `async`/`await`, so we use this - // IIFE. + // This function should _not_ return a `Promise`; its parent (`Runner#run`) + // is a void function, so this should be as well. However, we want to make + // use of `async`/`await`, so we use this IIFE. (async () => { - let t; + /** + * This is an interval that outputs stats about the worker pool every so often + */ + let debugInterval; + + /** + * @type {import('workerpool').WorkerPool} + */ let pool; + try { + if (files.length === 1) { + // using --parallel on a single file is slower than it needs to be. + warn( + '(Mocha) Only one test file detected; consider omitting --parallel' + ); + } const jobs = opts.jobs || os.cpus().length - 1; debug('starting pool with %d max workers', jobs); pool = workerpool.pool(WORKER_PATH, { workerType: 'process', maxWorkers: jobs }); - const globalBail = type(opts.bail) === 'boolean' && opts.bail; - let exitCode = 0; + let totalFailureCount = 0; + + /** + * This flag is used by all workers in the pool; it tells them that we + * aborted _on purpose_, because of "bail". If it's _not_ true, an + * abnormal termination of the worker process is considered a fatal + * error. + */ let didAbort = false; this.emit(EVENT_RUN_BEGIN); + // the "pool proxy" object is essentially just syntactic sugar to call a + // worker's procedure as one would a regular function. const poolProxy = await pool.proxy(); - t = setInterval(() => debugStats(pool), 5000).unref(); + debugInterval = setInterval(() => debugStats(pool), 5000).unref(); + await Promise.all( files.map(async file => { debug('enqueueing test file %s', file); try { const result = await poolProxy.run(file, opts); - const {failures, events} = deserialize(result); + const {failureCount, events} = deserialize(result); debug( 'completed run of file %s; %d failures / %d events', file, - failures, + failureCount, events.length ); - exitCode += failures; // can this be non-numeric? + totalFailureCount += failureCount; // can this ever be non-numeric? + /** + * If we set this, then we encountered a "bail" flag, and will + * terminate the pool once all events have been emitted. + */ let shouldAbort = false; let event = events.shift(); while (event) { this.emit(event.eventName, event.data, event.error); - // prefer event.data._bail over globalBail, if the former is - // set, since it could be disabled on any given Runnable. - // if event.data is falsy, event.error will be as well. if ( - (failures || event.error) && + (totalFailureCount || event.error) && event.data && - (type(event.data._bail) === 'boolean' - ? 
event.data._bail - : globalBail) + event.data._bail ) { - debug('terminating pool due to "bail" flag'); + debug( + 'nonzero failure count & found bail flag in event: %O', + event + ); + // we need to let the events complete for this file, as the worker + // should run any cleanup hooks shouldAbort = true; } event = events.shift(); } if (shouldAbort) { didAbort = true; + debug('terminating pool due to "bail" flag'); await pool.terminate(true); } debug('all events reported for file %s', file); @@ -115,7 +142,7 @@ class BufferedRunner extends Runner { await pool.terminate(true); this.uncaught(err); } else { - debug('thread pool terminated; skipping file %s', file); + debug('worker pool terminated; skipping file %s', file); } } finally { debug('done running file %s', file); @@ -123,11 +150,13 @@ class BufferedRunner extends Runner { }) ); + // note that if we aborted due to "bail", this will have happened + // already await pool.terminate(); this.emit(EVENT_RUN_END); - debug('exiting with code %d', exitCode); - callback(exitCode); + debug('exiting with code %d', totalFailureCount); + callback(totalFailureCount); } catch (err) { // this is an "unknown" error; probably from 3p code /* istanbul ignore next */ @@ -135,7 +164,7 @@ class BufferedRunner extends Runner { throw err; }); } finally { - clearInterval(t); + clearInterval(debugInterval); debugStats(pool); } })(); diff --git a/lib/serializer.js b/lib/serializer.js index 3d1c62b06c..5ff523faf6 100644 --- a/lib/serializer.js +++ b/lib/serializer.js @@ -18,14 +18,14 @@ class SerializableWorkerResult { * list of events; but since we're already doing the work, might as well use * it. * @param {SerializableEvent[]} [events=[]] - Events to eventually serialize - * @param {number} [failures=0] - Failure count + * @param {number} [failureCount=0] - Failure count */ - constructor(events = [], failures = 0) { + constructor(events = [], failureCount = 0) { /** * The number of failures in this run * @type {number} */ - this.failures = failures; + this.failureCount = failureCount; /** * All relevant events emitted from the {@link Runner}. * @type {SerializableEvent[]} @@ -386,7 +386,7 @@ exports.SerializableWorkerResult = SerializableWorkerResult; * The result of calling `SerializableWorkerResult.serialize` as received * by the deserializer. 
* @typedef {Object} SerializedWorkerResult - * @property {number} failures - Number of failures + * @property {number} failureCount - Number of failures * @property {SerializedEvent[]} events - Serialized events * @property {"SerializedWorkerResult"} __type - Symbol-like to denote the type of object this is */ diff --git a/test/integration/fixtures/options/parallel/bail.fixture.js b/test/integration/fixtures/options/parallel/bail.fixture.js new file mode 100644 index 0000000000..77692d56d1 --- /dev/null +++ b/test/integration/fixtures/options/parallel/bail.fixture.js @@ -0,0 +1,9 @@ +describe('some suite', function() { + this.bail(true); + + it('should bail', function() { + throw new Error(); + }); + + it('will not get run', function() {}); +}); diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 2946c5191d..0840b63619 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -153,6 +153,28 @@ describe('--parallel', function() { }); }); + describe('when encountering a "bail" in context', function() { + it('should skip some tests', function() { + return runMochaAsync('options/parallel/bail', ['--parallel']).then( + function(result) { + return expect( + result.passing + result.pending + result.failing, + 'to be less than', + 2 + ); + } + ); + }); + + it('should fail', function() { + return expect( + runMochaAsync('options/parallel/bail', ['--parallel', '--bail']), + 'when fulfilled', + 'to have failed' + ); + }); + }); + // each reporter name is duplicated; one is in all lower-case Object.keys(Mocha.reporters) .filter(function(name) { diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index d35573000d..943110bbb3 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -5,7 +5,8 @@ const { EVENT_RUN_BEGIN, EVENT_TEST_PASS, EVENT_TEST_FAIL, - EVENT_SUITE_END + EVENT_SUITE_END, + EVENT_SUITE_BEGIN } = require('../../lib/runner').constants; const rewiremock = require('rewiremock/node'); const BUFFERED_RUNNER_PATH = require.resolve('../../lib/buffered-runner.js'); @@ -34,7 +35,8 @@ describe('buffered-runner', function() { proxy: sandbox.stub().resolves({ run }), - terminate + terminate, + stats: sandbox.stub().returns({}) }); BufferedRunner = rewiremock.proxy(BUFFERED_RUNNER_PATH, () => ({ workerpool: { @@ -74,7 +76,7 @@ describe('buffered-runner', function() { const opts = {}; run.withArgs('some-file.js', opts).rejects(new Error('whoops')); run.withArgs('some-other-file.js', opts).resolves({ - failures: 0, + failureCount: 0, events: [ { eventName: EVENT_TEST_PASS, @@ -114,7 +116,7 @@ describe('buffered-runner', function() { const err = new Error('whoops'); run.withArgs('some-file.js', opts).rejects(new Error('whoops')); run.withArgs('some-other-file.js', opts).resolves({ - failures: 0, + failureCount: 0, events: [ { eventName: EVENT_TEST_PASS, @@ -187,12 +189,19 @@ describe('buffered-runner', function() { }); }); - describe('when provided global bail flag', function() { + describe('when suite should bail', function() { describe('when no event contains an error', function() { it('should not force-terminate', function(done) { run.resolves({ - failures: 0, + failureCount: 0, events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite', + _bail: true + } + }, { eventName: EVENT_TEST_PASS, data: { @@ -202,7 +211,8 @@ describe('buffered-runner', function() { { eventName: EVENT_SUITE_END, data: { - 
title: 'some suite' + title: 'some suite', + _bail: true } } ] @@ -217,7 +227,7 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts: {bail: true} + opts: {} } ); }); @@ -226,14 +236,21 @@ describe('buffered-runner', function() { describe('when an event contains an error and has positive failures', function() { describe('when subsequent files have not yet been run', function() { it('should cleanly terminate the thread pool', function(done) { - const opts = {bail: true}; + const opts = {}; const err = { __type: 'Error', message: 'oh no' }; run.withArgs('some-file.js', opts).resolves({ - failures: 1, + failureCount: 1, events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite', + _bail: true + } + }, { eventName: EVENT_TEST_FAIL, data: { @@ -244,7 +261,8 @@ describe('buffered-runner', function() { { eventName: EVENT_SUITE_END, data: { - title: 'some suite' + title: 'some suite', + _bail: true } } ] @@ -268,14 +286,21 @@ describe('buffered-runner', function() { }); describe('when subsequent files already started running', function() { it('should cleanly terminate the thread pool', function(done) { - const opts = {bail: true}; + const opts = {}; const err = { __type: 'Error', message: 'oh no' }; run.withArgs('some-file.js', opts).resolves({ - failures: 1, + failureCount: 1, events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite', + _bail: true + } + }, { eventName: EVENT_TEST_FAIL, data: { @@ -286,14 +311,21 @@ describe('buffered-runner', function() { { eventName: EVENT_SUITE_END, data: { - title: 'some suite' + title: 'some suite', + _bail: true } } ] }); run.withArgs('some-other-file.js', opts).resolves({ - failures: 0, + failureCount: 0, events: [ + { + eventName: EVENT_SUITE_BEGIN, + data: { + title: 'some suite' + } + }, { eventName: EVENT_TEST_PASS, data: { @@ -331,7 +363,7 @@ describe('buffered-runner', function() { describe('when no event contains an error', function() { it('should not force-terminate', function(done) { run.resolves({ - failures: 0, + failureCount: 0, events: [ { eventName: EVENT_TEST_PASS, @@ -372,7 +404,7 @@ describe('buffered-runner', function() { message: 'oh no' }; run.withArgs('some-file.js', opts).resolves({ - failures: 1, + failureCount: 1, events: [ { eventName: EVENT_TEST_FAIL, @@ -415,7 +447,7 @@ describe('buffered-runner', function() { message: 'oh no' }; run.withArgs('some-file.js', opts).resolves({ - failures: 1, + failureCount: 1, events: [ { eventName: EVENT_TEST_FAIL, @@ -434,7 +466,7 @@ describe('buffered-runner', function() { ] }); run.withArgs('some-other-file.js', opts).resolves({ - failures: 0, + failureCount: 0, events: [ { eventName: EVENT_TEST_PASS, @@ -475,7 +507,7 @@ describe('buffered-runner', function() { message: 'oh no' }; run.withArgs('some-file.js', opts).resolves({ - failures: 1, + failureCount: 1, events: [ { eventName: EVENT_TEST_FAIL, From c8e8938ee5bce58f306ca4ecee981b221ce71914 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 30 Mar 2020 17:36:45 -0700 Subject: [PATCH 39/92] remove cruft from wallaby config --- .wallaby.js | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/.wallaby.js b/.wallaby.js index 4bf3f6b5dc..55f7901275 100644 --- a/.wallaby.js +++ b/.wallaby.js @@ -47,17 +47,17 @@ module.exports = () => { mochaUnderTest ); // to make test/node-unit/color.spec.js pass, we need to run mocha in the project's folder context - const childProcess = require('child_process'); - const 
execFile = childProcess.execFile; - childProcess.execFile = function() { - let opts = arguments[2]; - if (typeof opts === 'function') { - opts = {}; - Array.prototype.splice.call(arguments, 2, 0, opts); - } - opts.cwd = wallaby.localProjectDir; - return execFile.apply(this, arguments); - }; + // const childProcess = require('child_process'); + // const execFile = childProcess.execFile; + // childProcess.execFile = function() { + // let opts = arguments[2]; + // if (typeof opts === 'function') { + // opts = {}; + // Array.prototype.splice.call(arguments, 2, 0, opts); + // } + // opts.cwd = wallaby.localProjectDir; + // return execFile.apply(this, arguments); + // }; require('./test/setup'); }, debug: true From aaef65f6449bfee9612e8e469d9a6c335b850e8d Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 30 Mar 2020 17:56:20 -0700 Subject: [PATCH 40/92] fix "sort" issue Signed-off-by: Christopher Hiller --- lib/cli/run.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lib/cli/run.js b/lib/cli/run.js index 5160d845e3..3aa81b1906 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -231,8 +231,7 @@ exports.builder = yargs => }, sort: { description: 'Sort test files', - group: GROUPS.FILES, - conflicts: 'parallel' + group: GROUPS.FILES }, timeout: { default: defaults.timeout, From 1a37e18405e9d49c6d6ad14c0db9f10dc32c5bb8 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 1 Apr 2020 14:33:35 -0700 Subject: [PATCH 41/92] increase timeout test timeout for appveyor --- test/unit/timeout.spec.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/timeout.spec.js b/test/unit/timeout.spec.js index ce95edcb81..e96f4b5d23 100644 --- a/test/unit/timeout.spec.js +++ b/test/unit/timeout.spec.js @@ -14,7 +14,7 @@ describe('timeouts', function() { }); it('should allow overriding per-test', function(done) { - this.timeout(200); + this.timeout(1500); setTimeout(function() { done(); }, 50); From 1c0971c23e91639c9ea93d4632999393309c3f6c Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 1 Apr 2020 14:34:11 -0700 Subject: [PATCH 42/92] cleanup .mocharc.yml - quote strings because YAML is YAML - remove comment cruft --- .mocharc.yml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/.mocharc.yml b/.mocharc.yml index fa429404d5..5875ec1a4f 100644 --- a/.mocharc.yml +++ b/.mocharc.yml @@ -1,9 +1,8 @@ -require: test/setup -ui: bdd +require: 'test/setup' +ui: 'bdd' global: - - okGlobalA,okGlobalB - - okGlobalC - - callback* + - 'okGlobalA,okGlobalB' + - 'okGlobalC' + - 'callback*' timeout: 1000 parallel: true -# bail: true From 9d75852cdcce9fabbb64cc38fc943f356b49e04a Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 7 Apr 2020 14:47:26 -0700 Subject: [PATCH 43/92] add a watch-ignore rule for using --watch with mocha's own tests --- .mocharc.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.mocharc.yml b/.mocharc.yml index 5875ec1a4f..57bcccfa85 100644 --- a/.mocharc.yml +++ b/.mocharc.yml @@ -6,3 +6,7 @@ global: - 'callback*' timeout: 1000 parallel: true +watch-ignore: + - '.*' + - 'docs/_dist/**' + - 'node_modules' From 622d33407517341f63b16fe3a2619d41ede80bc5 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 7 Apr 2020 14:47:57 -0700 Subject: [PATCH 44/92] fix event listener leak in worker.spec.js --- test/node-unit/worker.spec.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/node-unit/worker.spec.js b/test/node-unit/worker.spec.js index 4b62a1f9b6..58b4fdbd7d 100644 --- 
a/test/node-unit/worker.spec.js +++ b/test/node-unit/worker.spec.js @@ -182,5 +182,7 @@ describe('worker', function() { afterEach(function() { sandbox.restore(); + // this is needed due to `require.cache` getting dumped in watch mode + process.removeAllListeners('beforeExit'); }); }); From e52fea84af6ac683dbecf3abea0193f729b95056 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 7 Apr 2020 14:54:51 -0700 Subject: [PATCH 45/92] fix return value of BufferedRunner#run - also rename `opts` to `options`for optional param to `BufferedRunner#run` - tweak warning --- lib/buffered-runner.js | 20 ++++---- test/node-unit/buffered-runner.spec.js | 70 +++++++++++++------------- 2 files changed, 45 insertions(+), 45 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index a31a513f63..cc8d9cfc6f 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -12,7 +12,7 @@ const {setInterval, clearInterval} = global; const debugStats = pool => { const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats(); debug( - '%d/%d busy workers; %d idle; %d remaining tasks', + '%d/%d busy workers; %d idle; %d tasks queued', busyWorkers, totalWorkers, idleWorkers, @@ -42,13 +42,12 @@ class BufferedRunner extends Runner { * @todo should we just instantiate a `Test` object from the `BufferedEvent`? * @param {Function} callback - Called with an exit code corresponding to * number of test failures. - * @param {Object} options - * @param {string[]} options.files - List of test files - * @param {Options} option.opts - Command-line options + * @param {{files: string[], options: Options}} opts - Files to run and + * command-line options, respectively. */ - run(callback, {files, opts}) { + run(callback, {files, options} = {}) { // This function should _not_ return a `Promise`; its parent (`Runner#run`) - // is a void function, so this should be as well. However, we want to make + // returns this instance, so this should do the same. However, we want to make // use of `async`/`await`, so we use this IIFE. (async () => { /** @@ -65,10 +64,10 @@ class BufferedRunner extends Runner { if (files.length === 1) { // using --parallel on a single file is slower than it needs to be. 
warn( - '(Mocha) Only one test file detected; consider omitting --parallel' + `(Mocha) Only one (1) test file detected (${files[0]}); consider omitting --parallel` ); } - const jobs = opts.jobs || os.cpus().length - 1; + const jobs = options.jobs || os.cpus().length - 1; debug('starting pool with %d max workers', jobs); pool = workerpool.pool(WORKER_PATH, { workerType: 'process', @@ -97,7 +96,7 @@ class BufferedRunner extends Runner { files.map(async file => { debug('enqueueing test file %s', file); try { - const result = await poolProxy.run(file, opts); + const result = await poolProxy.run(file, options); const {failureCount, events} = deserialize(result); debug( 'completed run of file %s; %d failures / %d events', @@ -155,7 +154,7 @@ class BufferedRunner extends Runner { await pool.terminate(); this.emit(EVENT_RUN_END); - debug('exiting with code %d', totalFailureCount); + debug('completing with failure count %d', totalFailureCount); callback(totalFailureCount); } catch (err) { // this is an "unknown" error; probably from 3p code @@ -168,6 +167,7 @@ class BufferedRunner extends Runner { debugStats(pool); } })(); + return this; } } diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index 943110bbb3..8d7d1fe35b 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -55,15 +55,15 @@ describe('buffered-runner', function() { // the purpose of this is to ensure that--despite using `Promise`s // internally--`BufferedRunner#run` does not return a `Promise`. - it('should return `undefined`', function(done) { - expect(runner.run(done, {files: [], opts: {}}), 'to be undefined'); + it('should be chainable', function(done) { + expect(runner.run(done, {files: [], options: {}}), 'to be', runner); }); it('should emit `EVENT_RUN_BEGIN`', async function() { return expect( () => new Promise(resolve => { - runner.run(resolve, {files: [], opts: {}}); + runner.run(resolve, {files: [], options: {}}); }), 'to emit from', runner, @@ -73,9 +73,9 @@ describe('buffered-runner', function() { describe('when a worker fails', function() { it('should cleanly force-terminate the thread pool', function(done) { - const opts = {}; - run.withArgs('some-file.js', opts).rejects(new Error('whoops')); - run.withArgs('some-other-file.js', opts).resolves({ + const options = {}; + run.withArgs('some-file.js', options).rejects(new Error('whoops')); + run.withArgs('some-other-file.js', options).resolves({ failureCount: 0, events: [ { @@ -105,17 +105,17 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts + options } ); }); it('should delegate to Runner#uncaught', function(done) { - const opts = {}; + const options = {}; sandbox.spy(runner, 'uncaught'); const err = new Error('whoops'); - run.withArgs('some-file.js', opts).rejects(new Error('whoops')); - run.withArgs('some-other-file.js', opts).resolves({ + run.withArgs('some-file.js', options).rejects(new Error('whoops')); + run.withArgs('some-other-file.js', options).resolves({ failureCount: 0, events: [ { @@ -140,7 +140,7 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts + options } ); }); @@ -160,7 +160,7 @@ describe('buffered-runner', function() { }); done(); }, - {files: [], opts: {}} + {files: [], options: {}} ); }); }); @@ -181,7 +181,7 @@ describe('buffered-runner', function() { }, { files: [], - opts: { + options: { jobs: 2 } } @@ -227,7 +227,7 @@ describe('buffered-runner', function() { 
}, { files: ['some-file.js', 'some-other-file.js'], - opts: {} + options: {} } ); }); @@ -236,12 +236,12 @@ describe('buffered-runner', function() { describe('when an event contains an error and has positive failures', function() { describe('when subsequent files have not yet been run', function() { it('should cleanly terminate the thread pool', function(done) { - const opts = {}; + const options = {}; const err = { __type: 'Error', message: 'oh no' }; - run.withArgs('some-file.js', opts).resolves({ + run.withArgs('some-file.js', options).resolves({ failureCount: 1, events: [ { @@ -267,7 +267,7 @@ describe('buffered-runner', function() { } ] }); - run.withArgs('some-other-file.js', opts).rejects(); + run.withArgs('some-other-file.js', options).rejects(); runner.run( () => { @@ -279,19 +279,19 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts + options } ); }); }); describe('when subsequent files already started running', function() { it('should cleanly terminate the thread pool', function(done) { - const opts = {}; + const options = {}; const err = { __type: 'Error', message: 'oh no' }; - run.withArgs('some-file.js', opts).resolves({ + run.withArgs('some-file.js', options).resolves({ failureCount: 1, events: [ { @@ -317,7 +317,7 @@ describe('buffered-runner', function() { } ] }); - run.withArgs('some-other-file.js', opts).resolves({ + run.withArgs('some-other-file.js', options).resolves({ failureCount: 0, events: [ { @@ -351,7 +351,7 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts + options } ); }); @@ -390,7 +390,7 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts: {} + options: {} } ); }); @@ -398,12 +398,12 @@ describe('buffered-runner', function() { describe('when an event contains an error and has positive failures', function() { describe('when subsequent files have not yet been run', function() { it('should cleanly terminate the thread pool', function(done) { - const opts = {bail: true}; + const options = {bail: true}; const err = { __type: 'Error', message: 'oh no' }; - run.withArgs('some-file.js', opts).resolves({ + run.withArgs('some-file.js', options).resolves({ failureCount: 1, events: [ { @@ -421,7 +421,7 @@ describe('buffered-runner', function() { } ] }); - run.withArgs('some-other-file.js', opts).rejects(); + run.withArgs('some-other-file.js', options).rejects(); runner.run( () => { @@ -433,7 +433,7 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts + options } ); }); @@ -441,12 +441,12 @@ describe('buffered-runner', function() { describe('when subsequent files already started running', function() { it('should cleanly terminate the thread pool', function(done) { - const opts = {}; + const options = {}; const err = { __type: 'Error', message: 'oh no' }; - run.withArgs('some-file.js', opts).resolves({ + run.withArgs('some-file.js', options).resolves({ failureCount: 1, events: [ { @@ -465,7 +465,7 @@ describe('buffered-runner', function() { } ] }); - run.withArgs('some-other-file.js', opts).resolves({ + run.withArgs('some-other-file.js', options).resolves({ failureCount: 0, events: [ { @@ -493,7 +493,7 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts + options } ); }); @@ -501,12 +501,12 @@ describe('buffered-runner', function() { describe('when subsequent files have not yet been run', function() { it('should cleanly 
terminate the thread pool', function(done) { - const opts = {}; + const options = {}; const err = { __type: 'Error', message: 'oh no' }; - run.withArgs('some-file.js', opts).resolves({ + run.withArgs('some-file.js', options).resolves({ failureCount: 1, events: [ { @@ -525,7 +525,7 @@ describe('buffered-runner', function() { } ] }); - run.withArgs('some-other-file.js', opts).rejects(); + run.withArgs('some-other-file.js', options).rejects(); runner.run( () => { @@ -537,7 +537,7 @@ describe('buffered-runner', function() { }, { files: ['some-file.js', 'some-other-file.js'], - opts + options } ); }); From 938ce25cc4d5c37ef6290b6d0f95b6a4d9ee2cda Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 7 Apr 2020 14:55:21 -0700 Subject: [PATCH 46/92] create a FileCollectionOptions typedef --- lib/cli/collect-files.js | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/lib/cli/collect-files.js b/lib/cli/collect-files.js index 61d54ac4b3..37c236d1f5 100644 --- a/lib/cli/collect-files.js +++ b/lib/cli/collect-files.js @@ -16,13 +16,7 @@ const utils = require('../utils'); /** * Smash together an array of test files in the correct order - * @param {Object} opts - Options - * @param {string[]} opts.extension - File extensions to use - * @param {string[]} opts.spec - Files, dirs, globs to run - * @param {string[]} opts.ignore - Files, dirs, globs to ignore - * @param {string[]} opts.file - List of additional files to include - * @param {boolean} opts.recursive - Find files recursively - * @param {boolean} opts.sort - Sort test files + * @param {FileCollectionOptions} [opts] - Options * @returns {string[]} List of files to test * @private */ @@ -83,3 +77,14 @@ module.exports = ({ignore, extension, file, recursive, sort, spec} = {}) => { return files; }; + +/** + * An object to configure how Mocha gathers test files + * @typedef {Object} FileCollectionOptions + * @property {string[]} extension - File extensions to use + * @property {string[]} spec - Files, dirs, globs to run + * @property {string[]} ignore - Files, dirs, globs to ignore + * @property {string[]} file - List of additional files to include + * @property {boolean} recursive - Find files recursively + * @property {boolean} sort - Sort test files + */ From 9a8adf6dd81b2d2ed4e50019b2cdbd8f881b25e7 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 7 Apr 2020 15:02:56 -0700 Subject: [PATCH 47/92] fix debug statements to use plain strings using template strings makes them eagerly-loaded, which is not needed --- lib/cli/config.js | 6 +++--- lib/cli/options.js | 6 +++--- lib/cli/run-helpers.js | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/cli/config.js b/lib/cli/config.js index 1be9f9c4c0..28d6fd4ee4 100644 --- a/lib/cli/config.js +++ b/lib/cli/config.js @@ -42,11 +42,11 @@ const parsers = (exports.parsers = { js: filepath => { const cwdFilepath = path.resolve(filepath); try { - debug(`parsers: load using cwd-relative path: "${cwdFilepath}"`); + debug('parsers: load using cwd-relative path: "%s"', cwdFilepath); return require(cwdFilepath); } catch (err) { if (isModuleNotFoundError(err)) { - debug(`parsers: retry load as module-relative path: "${filepath}"`); + debug('parsers: retry load as module-relative path: "%s"', filepath); return require(filepath); } else { throw err; // rethrow @@ -69,7 +69,7 @@ const parsers = (exports.parsers = { */ exports.loadConfig = filepath => { let config = {}; - debug(`loadConfig: "${filepath}"`); + debug('loadConfig: "%s"', filepath); 
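  // A minimal sketch of why the printf-style form above is preferred: the
  // `debug` package skips '%s'/'%O' interpolation entirely unless the
  // namespace is enabled, whereas a template literal is built on every call:
  //   debug(`loadConfig: "${filepath}"`);  // string constructed even when logging is off
  //   debug('loadConfig: "%s"', filepath); // formatting deferred until actually logged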
const ext = path.extname(filepath); try { diff --git a/lib/cli/options.js b/lib/cli/options.js index 9f9f988e70..554a294b90 100644 --- a/lib/cli/options.js +++ b/lib/cli/options.js @@ -178,16 +178,16 @@ const loadPkgRc = (args = {}) => { try { const pkg = JSON.parse(fs.readFileSync(filepath, 'utf8')); if (pkg.mocha) { - debug(`'mocha' prop of package.json parsed:`, pkg.mocha); + debug('`mocha` prop of package.json parsed: %O', pkg.mocha); result = pkg.mocha; } else { - debug(`no config found in ${filepath}`); + debug('no config found in %s', filepath); } } catch (err) { if (args.package) { throw new Error(`Unable to read/parse ${filepath}: ${err}`); } - debug(`failed to read default package.json at ${filepath}; ignoring`); + debug('failed to read default package.json at %s; ignoring', filepath); } } return result; diff --git a/lib/cli/run-helpers.js b/lib/cli/run-helpers.js index 25000e8f9b..0c2baa7d5d 100644 --- a/lib/cli/run-helpers.js +++ b/lib/cli/run-helpers.js @@ -101,7 +101,7 @@ exports.handleRequires = (requires = []) => { */ const singleRun = async (mocha, {exit}, fileCollectParams) => { const files = collectFiles(fileCollectParams); - debug('running tests with files', files); + debug('single run with %d file(s)', files.length); mocha.files = files; await mocha.loadFilesAsync(); From 525cb7c805185b0446d8b59ef79c11c9b7fed044 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 15:30:43 -0700 Subject: [PATCH 48/92] reporter fixes for --parallel - `tap` reporter must report the plan at the END, which is spec-compliant. - `progress` reporter is incompatible because we do not know the total number of tests up-front. - `markdown` reporter _could_ be fixed, but it's too much effort atm. the reason is we don't have a single root suite; we'd need to build the TOC based on every `EVENT_SUITE_BEGIN` begin where the parameter (a deserialized `Suite`) has a `root` prop of `true`. `Suite#serialize` would need to recursively call `serialize()` on each item in its `suites` prop. 
- added a `SIGINT` handler to the `landing` reporter to restore the cursor and not befoul terminals --- lib/cli/run.js | 12 + lib/reporters/landing.js | 12 +- lib/reporters/tap.js | 3 +- test/integration/options/parallel.spec.js | 272 +++++++++++++++++++--- test/reporters/tap.spec.js | 48 +--- 5 files changed, 266 insertions(+), 81 deletions(-) diff --git a/lib/cli/run.js b/lib/cli/run.js index 3aa81b1906..9bcf1bd533 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -315,6 +315,18 @@ exports.builder = yargs => ); } + if (argv.reporter === 'progress' && argv.parallel) { + throw createUnsupportedError( + '--reporter=progress is mutually exclusive with --parallel' + ); + } + + if (argv.reporter === 'markdown' && argv.parallel) { + throw createUnsupportedError( + '--reporter=markdown is mutually exclusive with --parallel' + ); + } + // load requires first, because it can impact "plugin" validation handleRequires(argv.require); validatePlugin(argv, 'reporter', Mocha.reporters); diff --git a/lib/reporters/landing.js b/lib/reporters/landing.js index a6af946c42..c49c10781b 100644 --- a/lib/reporters/landing.js +++ b/lib/reporters/landing.js @@ -56,11 +56,12 @@ function Landing(runner, options) { var self = this; var width = (Base.window.width * 0.75) | 0; - var total = runner.total; var stream = process.stdout; + var plane = color('plane', '✈'); var crashed = -1; var n = 0; + var total = 0; function runway() { var buf = Array(width).join('-'); @@ -74,8 +75,7 @@ function Landing(runner, options) { runner.on(EVENT_TEST_END, function(test) { // check if the plane crashed - var col = crashed === -1 ? ((width * ++n) / total) | 0 : crashed; - + var col = crashed === -1 ? ((width * ++n) / ++total) | 0 : crashed; // show the crash if (test.state === STATE_FAILED) { plane = color('plane crash', '✈'); @@ -100,6 +100,12 @@ function Landing(runner, options) { }); } +// if cursor is hidden when we ctrl-C, then it will remain hidden unless... +process.once('SIGINT', function() { + cursor.show(); + process.emit('SIGINT'); +}); + /** * Inherit from `Base.prototype`. */ diff --git a/lib/reporters/tap.js b/lib/reporters/tap.js index 12257a745f..aa79fb19f1 100644 --- a/lib/reporters/tap.js +++ b/lib/reporters/tap.js @@ -50,9 +50,7 @@ function TAP(runner, options) { this._producer = createProducer(tapVersion); runner.once(EVENT_RUN_BEGIN, function() { - var ntests = runner.grepTotal(runner.suite); self._producer.writeVersion(); - self._producer.writePlan(ntests); }); runner.on(EVENT_TEST_END, function() { @@ -204,6 +202,7 @@ TAPProducer.prototype.writeEpilogue = function(stats) { println('# pass ' + stats.passes); // :TBD: Why are we not showing pending results? 
println('# fail ' + stats.failures); + this.writePlan(stats.passes + stats.failures + stats.pending); }; /** diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 0840b63619..92bdbc2329 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -5,6 +5,38 @@ var helpers = require('../helpers'); var runMochaAsync = helpers.runMochaAsync; var invokeMochaAsync = helpers.invokeMochaAsync; +function compareReporters(reporter) { + this.timeout(5000); + return runMochaAsync(path.join('options', 'parallel', 'test-a.fixture.js'), [ + '--reporter', + reporter, + '--no-parallel' + ]).then(function(expected) { + expected.output = expected.output.replace(/\d+ms/g, /100ms/); + return runMochaAsync( + path.join('options', 'parallel', 'test-a.fixture.js'), + ['--reporter', reporter, '--parallel'] + ).then(function(actual) { + actual.output = actual.output.replace(/\d+ms/g, /100ms/); + return [actual, expected]; + }); + }); +} + +function runGenericReporterTest(reporter) { + return compareReporters.call(this, reporter).then(function(result) { + var expected = result.shift(); + var actual = result.shift(); + return expect(actual, 'to satisfy', { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code, + output: expected.output + }); + }); +} + describe('--parallel', function() { describe('when used with CJS tests', function() { it('should have the same result as with --no-parallel', function() { @@ -32,15 +64,11 @@ describe('--parallel', function() { describe('when used with ESM tests', function() { it('should have the same result as with --no-parallel', function() { this.timeout(5000); - return runMochaAsync( - path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs'), - ['--no-parallel'] - ).then(function(expected) { + return runMochaAsync(path.join('esm', '*.fixture.mjs'), [ + '--no-parallel' + ]).then(function(expected) { return expect( - runMochaAsync( - path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs'), - ['--parallel'] - ), + runMochaAsync(path.join('esm', '*.fixture.mjs'), ['--parallel']), 'to be fulfilled with value satisfying', { passing: expected.passing, @@ -175,34 +203,210 @@ describe('--parallel', function() { }); }); - // each reporter name is duplicated; one is in all lower-case - Object.keys(Mocha.reporters) - .filter(function(name) { - return name !== 'base' && /^[a-z]/.test(name); - }) - .forEach(function(reporter) { - describe('when used with --reporter=' + reporter, function() { - it('should have the same result as run with --no-parallel', function() { - this.timeout(5000); - return runMochaAsync( - path.join('options', 'parallel', 'test-*.fixture.js'), - ['--reporter', reporter, '--no-parallel'] - ).then(function(expected) { - return expect( - runMochaAsync( + describe('reporter equivalence', function() { + // each reporter name is duplicated; one is in all lower-case + // 'base' is abstract, 'html' is browser-only, and 'progress' & `markdown` are incompatible + var DENY = ['progress', 'base', 'html', 'markdown']; + Object.keys(Mocha.reporters) + .filter(function(name) { + return /^[a-z]/.test(name) && DENY.indexOf(name) === -1; + }) + .forEach(function(reporter) { + describe( + 'when multiple test files run with --reporter=' + reporter, + function() { + it('should have the same result as when run with --no-parallel', function() { + // note that the output may not be in the same order, as running file + // order is 
non-deterministic in parallel mode + this.timeout(5000); + return runMochaAsync( path.join('options', 'parallel', 'test-*.fixture.js'), - ['--reporter', reporter, '--parallel'] - ), - 'to be fulfilled with value satisfying', - { - passing: expected.passing, - failing: expected.failing, - pending: expected.pending, - code: expected.code - } - ); - }); + ['--reporter', reporter, '--no-parallel'] + ).then(function(expected) { + return expect( + runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--reporter', reporter, '--parallel'] + ), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); + } + ); + }); + }); + + describe('when a single test file is run with --reporter=dot', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'dot'); + }); + }); + + describe('when a single test file is run with --reporter=doc', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'doc'); + }); + }); + + describe('when a single test file is run with --reporter=tap', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'tap'); + }); + }); + + describe('when a single test file is run with --reporter=list', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'list'); + }); + }); + + describe('when a single test file is run with --reporter=min', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'min'); + }); + }); + + describe('when a single test file is run with --reporter=spec', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'spec'); + }); + }); + + describe('when used with --reporter=nyan', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'nyan'); + }); + }); + + describe('when a single test file is run with --reporter=markdown', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'markdown'); + }); + }); + + describe('when a single test file is run with --reporter=landing', function() { + it('should have the same output as when run with --no-parallel', function() { + return runGenericReporterTest.call(this, 'landing'); + }); + }); + + describe('when a single test file is run with --reporter=progress', function() { + it('should fail due to incompatibility', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/test-a.fixture.js'), + '--reporter=progress', + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to have failed' + ).and('when fulfilled', 'to contain output', /mutually exclusive/); + }); + }); + + describe('when a single test file is run with --reporter=markdown', function() { + it('should fail due to incompatibility', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/test-a.fixture.js'), + '--reporter=markdown', + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to have 
failed' + ).and('when fulfilled', 'to contain output', /mutually exclusive/); + }); + }); + + describe('when a single test file is run with --reporter=json', function() { + it('should have the same output as when run with --no-parallel', function() { + // this one has some timings/durations that we can safely ignore + return compareReporters.call(this, 'json').then(function(result) { + var expected = result.shift(); + expected.output = JSON.parse(expected.output); + var actual = result.shift(); + actual.output = JSON.parse(actual.output); + return expect(actual, 'to satisfy', { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code, + output: { + stats: { + suites: expected.output.stats.suites, + tests: expected.output.stats.tests, + passes: expected.output.stats.passes, + pending: expected.output.stats.pending, + failures: expected.output.stats.failures + }, + tests: expected.tests + } }); }); }); + }); + + describe('when a single test file is run with --reporter=json-stream', function() { + it('should have the same output as when run with --no-parallel', function() { + // this one has some timings/durations that we can safely ignore + return compareReporters.call(this, 'json-stream').then(function(result) { + var expected = result.shift(); + expected.output = JSON.parse(expected.output); + var actual = result.shift(); + actual.output = JSON.parse(actual.output); + return expect(actual, 'to satisfy', { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code, + output: { + stats: { + suites: expected.output.stats.suites, + tests: expected.output.stats.tests, + passes: expected.output.stats.passes, + pending: expected.output.stats.pending, + failures: expected.output.stats.failures + }, + tests: expected.tests + } + }); + }); + }); + }); + + describe('when a single test file is run with --reporter=xunit', function() { + it('should have the same output as when run with --no-parallel', function() { + // durations need replacing + return compareReporters.call(this, 'xunit').then(function(result) { + var expected = result.shift(); + expected.output = expected.output + .replace(/time=".+?"/g, 'time="0.5"') + .replace(/timestamp=".+?"/g, 'timestamp="some-timestamp'); + var actual = result.shift(); + actual.output = actual.output + .replace(/time=".+?"/g, 'time="0.5"') + .replace(/timestamp=".+?"/g, 'timestamp="some-timestamp'); + return expect(actual, 'to satisfy', { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code, + output: expected.output + }); + }); + }); + }); }); diff --git a/test/reporters/tap.spec.js b/test/reporters/tap.spec.js index f3bfe8d473..01294e0430 100644 --- a/test/reporters/tap.spec.js +++ b/test/reporters/tap.spec.js @@ -40,29 +40,17 @@ describe('TAP reporter', function() { describe('event handlers', function() { describe("on 'start' event", function() { var expectedSuite = 'some suite'; - var expectedTotal = 10; - var expectedString; var stdout = []; before(function() { var runner = createMockRunner('start', EVENT_RUN_BEGIN); runner.suite = expectedSuite; - runner.grepTotal = function(string) { - expectedString = string; - return expectedTotal; - }; stdout = runReporter({}, runner, options); }); it('should not write a TAP specification version', function() { expect(stdout, 'not to contain', 'TAP version'); }); - - it('should write the number of tests that it plans to run', function() { - var expectedArray = 
['1..' + expectedTotal + '\n']; - expect(stdout, 'to equal', expectedArray); - expect(expectedString, 'to be', expectedSuite); - }); }); describe("on 'pending' event", function() { @@ -78,7 +66,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -102,7 +89,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -141,7 +127,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -171,7 +156,6 @@ describe('TAP reporter', function() { error ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -209,7 +193,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -245,7 +228,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -271,11 +253,10 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); - it('should write total tests, passes, and failures', function() { + it('should write total tests, passes, failures, & plan', function() { var numberOfPasses = 1; var numberOfFails = 1; var totalTests = numberOfPasses + numberOfFails; @@ -284,7 +265,8 @@ describe('TAP reporter', function() { 'not ok ' + numberOfFails + ' ' + expectedTitle + '\n', '# tests ' + totalTests + '\n', '# pass ' + numberOfPasses + '\n', - '# fail ' + numberOfFails + '\n' + '# fail ' + numberOfFails + '\n', + '1..' + totalTests + '\n' ]; expect(stdout, 'to equal', expectedArray); }); @@ -302,17 +284,11 @@ describe('TAP reporter', function() { describe('event handlers', function() { describe("on 'start' event", function() { var expectedSuite = 'some suite'; - var expectedTotal = 10; - var expectedString; var stdout; before(function() { var runner = createMockRunner('start', EVENT_RUN_BEGIN); runner.suite = expectedSuite; - runner.grepTotal = function(string) { - expectedString = string; - return expectedTotal; - }; stdout = runReporter({}, runner, options); }); @@ -321,12 +297,6 @@ describe('TAP reporter', function() { var expectedFirstLine = 'TAP version ' + tapVersion + '\n'; expect(stdout[0], 'to equal', expectedFirstLine); }); - - it('should write the number of tests that it plans to run', function() { - var expectedSecondLine = '1..' 
+ expectedTotal + '\n'; - expect(stdout[1], 'to equal', expectedSecondLine); - expect(expectedString, 'to be', expectedSuite); - }); }); describe("on 'pending' event", function() { @@ -342,7 +312,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -366,7 +335,6 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -405,7 +373,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -438,7 +405,6 @@ describe('TAP reporter', function() { error ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -479,7 +445,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -519,7 +484,6 @@ describe('TAP reporter', function() { } }; runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); @@ -545,11 +509,10 @@ describe('TAP reporter', function() { test ); runner.suite = ''; - runner.grepTotal = noop; stdout = runReporter({}, runner, options); }); - it('should write total tests, passes, and failures', function() { + it('should write total tests, passes, failures & plan', function() { var numberOfPasses = 1; var numberOfFails = 1; var totalTests = numberOfPasses + numberOfFails; @@ -558,7 +521,8 @@ describe('TAP reporter', function() { 'not ok ' + numberOfFails + ' ' + expectedTitle + '\n', '# tests ' + totalTests + '\n', '# pass ' + numberOfPasses + '\n', - '# fail ' + numberOfFails + '\n' + '# fail ' + numberOfFails + '\n', + '1..' + totalTests + '\n' ]; expect(stdout, 'to equal', expectedArray); }); From c9bfa67ad15038ec54b797857d237df3926cfe6f Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 15:35:04 -0700 Subject: [PATCH 49/92] implement --parallel --watch - rename `loadAsync` prop of `Mocha` to `lazyLoadFiles`, which is more descriptive of its intent - refactor a lot of `watch-run.js` to support these changes - remove unneeded `runOptions` parameter of `Mocha#run` - add a docstring or two - refactor `--watch` tests to avoid assigning to the context object --- lib/cli/run-helpers.js | 96 +++++++---- lib/cli/watch-run.js | 230 +++++++++++++++++++------ lib/mocha.js | 19 +- lib/runner.js | 5 +- test/integration/options/watch.spec.js | 117 ++++++------- 5 files changed, 310 insertions(+), 157 deletions(-) diff --git a/lib/cli/run-helpers.js b/lib/cli/run-helpers.js index 0c2baa7d5d..6f762b7752 100644 --- a/lib/cli/run-helpers.js +++ b/lib/cli/run-helpers.js @@ -10,10 +10,10 @@ const fs = require('fs'); const path = require('path'); const debug = require('debug')('mocha:cli:run:helpers'); -const watchRun = require('./watch-run'); +const {watchRun, watchParallelRun} = require('./watch-run'); const collectFiles = require('./collect-files'); - -const cwd = (exports.cwd = process.cwd()); +const {type} = require('../utils'); +const cwd = process.cwd(); /** * Exits Mocha when tests + code under test has finished execution (default) @@ -73,21 +73,43 @@ exports.list = str => Array.isArray(str) ? exports.list(str.join(',')) : str.split(/ *, */); /** - * `require()` the modules as required by `--require ` + * `require()` the modules as required by `--require `. 
+ * Sorts any require-defined root hooks into buckets to assign to the root suite + * upon `Mocha` instantiation. * @param {string[]} requires - Modules to require + * @returns {import('../mocha').MochaRootHooks} Any root hooks * @private */ -exports.handleRequires = (requires = []) => { - requires.forEach(mod => { - let modpath = mod; - if (fs.existsSync(mod, {cwd}) || fs.existsSync(`${mod}.js`, {cwd})) { - modpath = path.resolve(mod); - debug(`resolved ${mod} to ${modpath}`); +exports.handleRequires = (requires = []) => + requires.reduce( + (acc, mod) => { + let modpath = mod; + if (fs.existsSync(mod, {cwd}) || fs.existsSync(`${mod}.js`, {cwd})) { + modpath = path.resolve(mod); + debug('resolved required file %s to %s', mod, modpath); + } + const requiredModule = require(modpath); + if ( + type(requiredModule) === 'object' && + type(requiredModule.mochaHooks) === 'object' + ) { + debug('found root hooks in required file %s', mod); + const rootHooks = requiredModule.mochaHooks; + acc.beforeAll = acc.beforeAll.concat(rootHooks.beforeAll || []); + acc.beforeEach = acc.beforeEach.concat(rootHooks.beforeEach || []); + acc.afterAll = acc.afterAll.concat(rootHooks.afterAll || []); + acc.afterEach = acc.afterEach.concat(rootHooks.afterEach || []); + } + debug('loaded required file %s', mod); + return acc; + }, + { + beforeAll: [], + beforeEach: [], + afterAll: [], + afterEach: [] } - require(modpath); - debug(`loaded require "${mod}"`); - }); -}; + ); /** * Collect and load test files, then run mocha instance. @@ -104,49 +126,58 @@ const singleRun = async (mocha, {exit}, fileCollectParams) => { debug('single run with %d file(s)', files.length); mocha.files = files; + // handles ESM modules await mocha.loadFilesAsync(); return mocha.run(exit ? exitMocha : exitMochaLater); }; /** - * Collect files and run tests (using `BufferedRunner`) + * Collect files and run tests (using `BufferedRunner`). + * + * This is `async` for consistency. + * * @param {Mocha} mocha - Mocha instance - * @param {Options} opts - Command line options + * @param {Options} options - Command line options * @param {Object} fileCollectParams - Parameters that control test * file collection. See `lib/cli/collect-files.js`. - * @returns {Promise} + * @returns {Promise} + * @ignore * @private */ -const parallelRun = async (mocha, opts, fileCollectParams) => { +const parallelRun = async (mocha, options, fileCollectParams) => { const files = collectFiles(fileCollectParams); - const {jobs} = opts; debug( - `executing ${files.length} test file(s) across ${jobs} concurrent jobs` + 'executing %d test file(s) across %d concurrent jobs', + files.length, + options.jobs ); + mocha.files = files; - return mocha.run(opts.exit ? exitMocha : exitMochaLater, {files, opts}); + // note that we DO NOT load any files here; this is handled by the worker + return mocha.run(options.exit ? exitMocha : exitMochaLater); }; /** - * Actually run tests + * Actually run tests. 
Delegates to one of four different functions: + * - `singleRun`: run tests in serial & exit + * - `watchRun`: run tests in serial, rerunning as files change + * - `parallelRun`: run tests in parallel & exit + * - `watchParallelRun`: run tests in parallel, rerunning as files change * @param {Mocha} mocha - Mocha instance - * @param {Object} opts - Command line options + * @param {Mocha.MochaOptions} opts - Command line options * @private - * @returns {Promise} + * @returns {Promise} */ exports.runMocha = async (mocha, options) => { const { watch = false, extension = [], - exit = false, ignore = [], file = [], parallel = false, recursive = false, sort = false, - spec = [], - watchFiles, - watchIgnore + spec = [] } = options; const fileCollectParams = { @@ -158,13 +189,14 @@ exports.runMocha = async (mocha, options) => { spec }; + let run; if (watch) { - watchRun(mocha, {watchFiles, watchIgnore}, fileCollectParams); - } else if (parallel) { - await parallelRun(mocha, options, fileCollectParams); + run = parallel ? watchParallelRun : watchRun; } else { - await singleRun(mocha, {exit}, fileCollectParams); + run = parallel ? parallelRun : singleRun; } + + return run(mocha, options, fileCollectParams); }; /** diff --git a/lib/cli/watch-run.js b/lib/cli/watch-run.js index b35a906959..1f8589d4b9 100644 --- a/lib/cli/watch-run.js +++ b/lib/cli/watch-run.js @@ -1,5 +1,6 @@ 'use strict'; +const debug = require('debug')('mocha:cli:watch'); const path = require('path'); const chokidar = require('chokidar'); const Context = require('../context'); @@ -12,6 +13,42 @@ const collectFiles = require('./collect-files'); * @private */ +/** + * Run Mocha in parallel "watch" mode + * @param {Mocha} mocha - Mocha instance + * @param {Object} opts - Options + * @param {string[]} [opts.watchFiles] - List of paths and patterns to + * watch. If not provided all files with an extension included in + * `fileColletionParams.extension` are watched. See first argument of + * `chokidar.watch`. + * @param {string[]} opts.watchIgnore - List of paths and patterns to + * exclude from watching. See `ignored` option of `chokidar`. + * @param {import('./collect-files').FileCollectionOptions} fileCollectParams - Parameters that control test + * @private + */ +exports.watchParallelRun = ( + mocha, + {watchFiles, watchIgnore}, + fileCollectParams +) => { + debug('creating parallel watcher'); + return createWatcher(mocha, { + watchFiles, + watchIgnore, + beforeRun({mocha}) { + mocha.files = collectFiles(fileCollectParams); + // in parallel mode, the main Mocha process doesn't actually load the + // files. this flag prevents `mocha.run()` from autoloading. + mocha.lazyLoadFiles = true; + return mocha; + }, + afterRun({watcher}) { + blastCache(watcher); + }, + fileCollectParams + }); +}; + /** * Run Mocha in "watch" mode * @param {Mocha} mocha - Mocha instance @@ -22,27 +59,88 @@ const collectFiles = require('./collect-files'); * `chokidar.watch`. * @param {string[]} opts.watchIgnore - List of paths and patterns to * exclude from watching. See `ignored` option of `chokidar`. - * @param {Object} fileCollectParams - Parameters that control test + * @param {import('./collect-files').FileCollectionOptions} fileCollectParams - Parameters that control test * file collection. See `lib/cli/collect-files.js`. - * @param {string[]} fileCollectParams.extension - List of extensions - * to watch if `opts.watchFiles` is not given. 
* @private */ -module.exports = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { +exports.watchRun = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { + debug('creating serial watcher'); + // list of all test files + + return createWatcher(mocha, { + watchFiles, + watchIgnore, + beforeRun({mocha}) { + mocha.unloadFiles(); + + // I don't know why we're cloning the root suite. + const rootSuite = mocha.suite.clone(); + + // this `require` is needed because the require cache has been cleared. the dynamic + // exports set via the below call to `mocha.ui()` won't work properly if a + // test depends on this module (see `required-tokens.spec.js`). + const Mocha = require('../mocha'); + + // ... and now that we've gotten a new module, we need to use it again due + // to `mocha.ui()` call + const newMocha = new Mocha(mocha.options); + // don't know why this is needed + newMocha.suite = rootSuite; + // nor this + newMocha.suite.ctx = new Context(); + + // reset the list of files + newMocha.files = collectFiles(fileCollectParams); + + // because we've swapped out the root suite (see the `run` inner function + // in `createRerunner`), we need to call `mocha.ui()` again to set up the context/globals. + newMocha.ui(newMocha.options.ui); + + return newMocha; + }, + afterRun({watcher}) { + blastCache(watcher); + }, + fileCollectParams + }); +}; + +/** + * Bootstraps a chokidar watcher. Handles keyboard input & signals + * @param {Mocha} mocha - Mocha instance + * @param {Object} opts + * @param {BeforeWatchRun} [opts.beforeRun] - Function to call before + * `mocha.run()` + * @param {AfterWatchRun} [opts.afterRun] - Function to call after `mocha.run()` + * @param {string[]} [opts.watchFiles] - List of paths and patterns to watch. If + * not provided all files with an extension included in + * `fileColletionParams.extension` are watched. See first argument of + * `chokidar.watch`. + * @param {string[]} [opts.watchIgnore] - List of paths and patterns to exclude + * from watching. See `ignored` option of `chokidar`. + * @param {import('./collect-files').FileCollectionOptions} opts.fileCollectParams - List of extensions to watch if `opts.watchFiles` is not given. + * @returns {import('chokidar').FSWatcher} + * @ignore + * @private + */ +const createWatcher = ( + mocha, + {watchFiles, watchIgnore, beforeRun, afterRun, fileCollectParams} +) => { if (!watchFiles) { watchFiles = fileCollectParams.extension.map(ext => `**/*.${ext}`); } + debug('ignoring files matching: %s', watchIgnore); + const watcher = chokidar.watch(watchFiles, { ignored: watchIgnore, ignoreInitial: true }); - const rerunner = createRerunner(mocha, () => { - getWatchedFiles(watcher).forEach(file => { - delete require.cache[file]; - }); - mocha.files = collectFiles(fileCollectParams); + const rerunner = createRerunner(mocha, watcher, { + beforeRun, + afterRun }); watcher.on('ready', () => { @@ -53,7 +151,6 @@ module.exports = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { rerunner.scheduleRun(); }); - console.log(); hideCursor(); process.on('exit', () => { showCursor(); @@ -74,36 +171,43 @@ module.exports = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { .toLowerCase(); if (str === 'rs') rerunner.scheduleRun(); }); + + return watcher; }; /** - * Create an object that allows you to rerun tests on the mocha - * instance. `beforeRun` is called everytime before `mocha.run()` is - * called. + * Create an object that allows you to rerun tests on the mocha instance. 
* * @param {Mocha} mocha - Mocha instance - * @param {function} beforeRun - Called just before `mocha.run()` + * @param {import('chokidar').FSWatcher} watcher - chokidar `FSWatcher` instance + * @param {Object} [opts] - Options! + * @param {BeforeWatchRun} [opts.beforeRun] - Function to call before `mocha.run()` + * @param {AfterWatchRun} [opts.afterRun] - Function to call after `mocha.run()` + * @returns {Rerunner} + * @ignore + * @private */ -const createRerunner = (mocha, beforeRun) => { +const createRerunner = (mocha, watcher, {beforeRun, afterRun} = {}) => { // Set to a `Runner` when mocha is running. Set to `null` when mocha is not // running. let runner = null; + // true if a file has changed during a test run let rerunScheduled = false; const run = () => { - try { - beforeRun(); - resetMocha(mocha); - runner = mocha.run(() => { - runner = null; - if (rerunScheduled) { - rerun(); - } - }); - } catch (e) { - console.log(e.stack); - } + mocha = beforeRun ? beforeRun({mocha, watcher}) : mocha; + + runner = mocha.run(() => { + debug('finished watch run'); + runner = null; + afterRun && afterRun({mocha, watcher}); + if (rerunScheduled) { + rerun(); + } else { + debug('waiting for changes...'); + } + }); }; const scheduleRun = () => { @@ -136,32 +240,18 @@ const createRerunner = (mocha, beforeRun) => { * * @param watcher - Instance of a chokidar watcher * @return {string[]} - List of absolute paths + * @ignore + * @private */ const getWatchedFiles = watcher => { const watchedDirs = watcher.getWatched(); - let watchedFiles = []; - Object.keys(watchedDirs).forEach(dir => { - watchedFiles = watchedFiles.concat( - watchedDirs[dir].map(file => path.join(dir, file)) - ); - }); - return watchedFiles; -}; - -/** - * Reset the internal state of the mocha instance so that tests can be rerun. - * - * @param {Mocha} mocha - Mocha instance - * @private - */ -const resetMocha = mocha => { - mocha.unloadFiles(); - mocha.suite = mocha.suite.clone(); - mocha.suite.ctx = new Context(); - // Registers a callback on `mocha.suite` that wires new context to the DSL - // (e.g. `describe`) that is exposed as globals when the test files are - // reloaded. - mocha.ui(mocha.options.ui); + return Object.keys(watchedDirs).reduce( + (acc, dir) => [ + ...acc, + ...watchedDirs[dir].map(file => path.join(dir, file)) + ], + [] + ); }; /** @@ -189,3 +279,43 @@ const showCursor = () => { const eraseLine = () => { process.stdout.write('\u001b[2K'); }; + +/** + * Blast all of the watched files out of `require.cache` + * @param {import('chokidar').FSWatcher} watcher - chokidar FSWatcher + * @ignore + * @private + */ +const blastCache = watcher => { + const files = getWatchedFiles(watcher); + files.forEach(file => { + delete require.cache[file]; + }); + debug('deleted %d file(s) from the require cache', files.length); +}; + +/** + * Callback to be run before `mocha.run()` is called. + * Optionally, it can return a new `Mocha` instance. + * @callback BeforeWatchRun + * @private + * @param {{mocha: Mocha, watcher: import('chokidar').FSWatcher}} options + * @returns {Mocha} + */ + +/** + * Callback to be run after `mocha.run()` completes. Typically used to clear + * require cache. 
+ * @callback AfterWatchRun + * @private + * @param {{mocha: Mocha, watcher: import('chokidar').FSWatcher}} options + * @returns {void} + */ + +/** + * Object containing run control methods + * @typedef {Object} Rerunner + * @private + * @property {() => void} run - Calls `mocha.run()` + * @property {() => void} scheduleRun - Schedules another call to `run` + */ diff --git a/lib/mocha.js b/lib/mocha.js index e142ab1229..2856a82071 100644 --- a/lib/mocha.js +++ b/lib/mocha.js @@ -91,6 +91,7 @@ exports.Test = require('./test'); * @param {number|string} [options.timeout] - Timeout threshold value. * @param {string} [options.ui] - Interface name. * @param {boolean} [options.parallel] - Run jobs in parallel + * @param {number} [options.jobs] - Max number of worker processes for parallel runs */ function Mocha(options) { options = utils.assign({}, mocharc, options || {}); @@ -138,9 +139,13 @@ function Mocha(options) { } }, this); - this._runner = options.parallel - ? require('./buffered-runner') - : exports.Runner; + if (options.parallel) { + this._runner = require('./buffered-runner'); + this.lazyLoadFiles = true; + } else { + this._runner = exports.Runner; + this.lazyLoadFiles = false; + } } /** @@ -345,7 +350,7 @@ Mocha.prototype.loadFiles = function(fn) { Mocha.prototype.loadFilesAsync = function() { var self = this; var suite = this.suite; - this.loadAsync = true; + this.lazyLoadFiles = true; if (!esmUtils) { return new Promise(function(resolve) { @@ -829,8 +834,8 @@ Object.defineProperty(Mocha.prototype, 'version', { * // exit with non-zero status if there were test failures * mocha.run(failures => process.exitCode = failures ? 1 : 0); */ -Mocha.prototype.run = function(fn, runOptions) { - if (this.files.length && !this.loadAsync) { +Mocha.prototype.run = function(fn) { + if (this.files.length && !this.lazyLoadFiles) { this.loadFiles(); } var suite = this.suite; @@ -869,5 +874,5 @@ Mocha.prototype.run = function(fn, runOptions) { } } - return runner.run(done, runOptions); + return runner.run(done, {files: this.files, options: options}); }; diff --git a/lib/runner.js b/lib/runner.js index c60e562a81..1636d53f3e 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -870,10 +870,11 @@ Runner.prototype.uncaughtEnd = function uncaughtEnd(err) { * * @public * @memberof Runner - * @param {Function} fn + * @param {Function} fn - Callback when finished + * @param {{files: string[], options: Options}} [opts] - For subclasses * @return {Runner} Runner instance. 
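 * @example
 * // usage sketch (variable names are illustrative); mirrors how `Mocha#run`
 * // invokes this method with a completion callback and the files/options bag
 * runner.run(
 *   failureCount => {
 *     process.exitCode = failureCount ? 1 : 0;
 *   },
 *   {files: mocha.files, options: mocha.options}
 * );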
*/ -Runner.prototype.run = function(fn) { +Runner.prototype.run = function(fn, opts) { var self = this; var rootSuite = this.suite; diff --git a/test/integration/options/watch.spec.js b/test/integration/options/watch.spec.js index f5cd382dee..2dc2e41ea9 100644 --- a/test/integration/options/watch.spec.js +++ b/test/integration/options/watch.spec.js @@ -7,24 +7,25 @@ const helpers = require('../helpers'); describe('--watch', function() { describe('when enabled', function() { + let tempDir; this.timeout(10 * 1000); this.slow(3000); beforeEach(function() { - this.tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mocha-')); + tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'mocha-')); }); afterEach(function() { - if (this.tempDir) { - return fs.remove(this.tempDir); + if (tempDir) { + return fs.remove(tempDir); } }); it('reruns test when watched test file is touched', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - return runMochaWatch([testFile], this.tempDir, () => { + return runMochaWatch([testFile], tempDir, () => { touchFile(testFile); }).then(results => { expect(results, 'to have length', 2); @@ -32,15 +33,15 @@ describe('--watch', function() { }); it('reruns test when file matching --watch-files changes', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const watchedFile = path.join(this.tempDir, 'dir/file.xyz'); + const watchedFile = path.join(tempDir, 'dir/file.xyz'); touchFile(watchedFile); return runMochaWatch( [testFile, '--watch-files', 'dir/*.xyz'], - this.tempDir, + tempDir, () => { touchFile(watchedFile); } @@ -50,13 +51,13 @@ describe('--watch', function() { }); it('reruns test when file matching --watch-files is added', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const watchedFile = path.join(this.tempDir, 'lib/file.xyz'); + const watchedFile = path.join(tempDir, 'lib/file.xyz'); return runMochaWatch( [testFile, '--watch-files', '**/*.xyz'], - this.tempDir, + tempDir, () => { touchFile(watchedFile); } @@ -66,15 +67,15 @@ describe('--watch', function() { }); it('reruns test when file matching --watch-files is removed', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const watchedFile = path.join(this.tempDir, 'lib/file.xyz'); + const watchedFile = path.join(tempDir, 'lib/file.xyz'); touchFile(watchedFile); return runMochaWatch( [testFile, '--watch-files', 'lib/**/*.xyz'], - this.tempDir, + tempDir, () => { fs.removeSync(watchedFile); } @@ -84,15 +85,15 @@ describe('--watch', function() { }); it('does not rerun test when file not matching --watch-files is changed', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const watchedFile = path.join(this.tempDir, 'dir/file.js'); + const watchedFile = path.join(tempDir, 'dir/file.js'); touchFile(watchedFile); return runMochaWatch( [testFile, '--watch-files', 'dir/*.xyz'], - this.tempDir, + tempDir, () => { touchFile(watchedFile); } @@ -102,14 +103,14 @@ describe('--watch', function() { }); it('picks up new test files when they are added', function() { - const testFile = path.join(this.tempDir, 
'test/a.js'); + const testFile = path.join(tempDir, 'test/a.js'); copyFixture('__default__', testFile); return runMochaWatch( ['test/**/*.js', '--watch-files', 'test/**/*.js'], - this.tempDir, + tempDir, () => { - const addedTestFile = path.join(this.tempDir, 'test/b.js'); + const addedTestFile = path.join(tempDir, 'test/b.js'); copyFixture('passing', addedTestFile); } ).then(results => { @@ -120,28 +121,24 @@ describe('--watch', function() { }); it('reruns test when file matching --extension is changed', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const watchedFile = path.join(this.tempDir, 'file.xyz'); + const watchedFile = path.join(tempDir, 'file.xyz'); touchFile(watchedFile); - return runMochaWatch( - [testFile, '--extension', 'xyz,js'], - this.tempDir, - () => { - touchFile(watchedFile); - } - ).then(results => { + return runMochaWatch([testFile, '--extension', 'xyz,js'], tempDir, () => { + touchFile(watchedFile); + }).then(results => { expect(results, 'to have length', 2); }); }); it('reruns when "rs\\n" typed', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - return runMochaWatch([testFile], this.tempDir, mochaProcess => { + return runMochaWatch([testFile], tempDir, mochaProcess => { mochaProcess.stdin.write('rs\n'); }).then(results => { expect(results, 'to have length', 2); @@ -149,54 +146,42 @@ describe('--watch', function() { }); it('reruns test when file starting with . and matching --extension is changed', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const watchedFile = path.join(this.tempDir, '.file.xyz'); + const watchedFile = path.join(tempDir, '.file.xyz'); touchFile(watchedFile); - return runMochaWatch( - [testFile, '--extension', 'xyz,js'], - this.tempDir, - () => { - touchFile(watchedFile); - } - ).then(results => { + return runMochaWatch([testFile, '--extension', 'xyz,js'], tempDir, () => { + touchFile(watchedFile); + }).then(results => { expect(results, 'to have length', 2); }); }); it('ignores files in "node_modules" and ".git" by default', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const nodeModulesFile = path.join( - this.tempDir, - 'node_modules', - 'file.xyz' - ); - const gitFile = path.join(this.tempDir, '.git', 'file.xyz'); + const nodeModulesFile = path.join(tempDir, 'node_modules', 'file.xyz'); + const gitFile = path.join(tempDir, '.git', 'file.xyz'); touchFile(gitFile); touchFile(nodeModulesFile); - return runMochaWatch( - [testFile, '--extension', 'xyz,js'], - this.tempDir, - () => { - touchFile(gitFile); - touchFile(nodeModulesFile); - } - ).then(results => { + return runMochaWatch([testFile, '--extension', 'xyz,js'], tempDir, () => { + touchFile(gitFile); + touchFile(nodeModulesFile); + }).then(results => { expect(results, 'to have length', 1); }); }); it('ignores files matching --watch-ignore', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('__default__', testFile); - const watchedFile = path.join(this.tempDir, 'dir/file-to-ignore.xyz'); + const watchedFile = path.join(tempDir, 'dir/file-to-ignore.xyz'); touchFile(watchedFile); 
return runMochaWatch( @@ -207,7 +192,7 @@ describe('--watch', function() { '--watch-ignore', 'dir/*ignore*' ], - this.tempDir, + tempDir, () => { touchFile(watchedFile); } @@ -217,12 +202,12 @@ describe('--watch', function() { }); it('reloads test files when they change', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('options/watch/test-file-change', testFile); return runMochaWatch( [testFile, '--watch-files', '**/*.js'], - this.tempDir, + tempDir, () => { replaceFileContents( testFile, @@ -240,15 +225,15 @@ describe('--watch', function() { }); it('reloads test dependencies when they change', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('options/watch/test-with-dependency', testFile); - const dependency = path.join(this.tempDir, 'lib', 'dependency.js'); + const dependency = path.join(tempDir, 'lib', 'dependency.js'); copyFixture('options/watch/dependency', dependency); return runMochaWatch( [testFile, '--watch-files', 'lib/**/*.js'], - this.tempDir, + tempDir, () => { replaceFileContents( dependency, @@ -267,10 +252,10 @@ describe('--watch', function() { // Regression test for https://github.com/mochajs/mocha/issues/2027 it('respects --fgrep on re-runs', function() { - const testFile = path.join(this.tempDir, 'test.js'); + const testFile = path.join(tempDir, 'test.js'); copyFixture('options/grep', testFile); - return runMochaWatch([testFile, '--fgrep', 'match'], this.tempDir, () => { + return runMochaWatch([testFile, '--fgrep', 'match'], tempDir, () => { touchFile(testFile); }).then(results => { expect(results, 'to have length', 2); @@ -293,7 +278,7 @@ describe('--watch', function() { function runMochaWatch(args, cwd, change) { const [mochaProcess, resultPromise] = helpers.invokeMochaAsync( [...args, '--watch', '--reporter', 'json'], - {cwd, stdio: 'pipe'} + {cwd, stdio: ['pipe', 'pipe', 'inherit']} ); return sleep(1000) From a77532733d896d96b5dfc132cdd2ae325f74695d Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 15:37:32 -0700 Subject: [PATCH 50/92] integration test helper improvements - the major change is that `STDERR` will, by default, display in the terminal. this can be used to catch problems that may otherwise have been overlooked, e.g., `test/integration/fixtures/uncaught/listeners.fixture.js` - `DEBUG` is explicitly removed from the env, so we can use it without a bunch of noise, and it will not affect inspection of child process output when `pipe` is used to grab `STDERR` from the child. 
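For example (a sketch only; the fixture name and the asserted pattern are hypothetical, but `invokeMochaAsync` and the `'pipe'` shorthand are the helpers shown in the diff below), a spec that needs to inspect the child's `STDERR` can opt back into capturing it:

```
// hypothetical spec using test/integration/helpers.js
var invokeMochaAsync = require('../helpers').invokeMochaAsync;

it('prints a warning to STDERR', function() {
  // passing 'pipe' overrides the new ['ignore', 'pipe', 'inherit'] default,
  // so the child's STDERR ends up in the captured output rather than the terminal
  return expect(
    invokeMochaAsync(['some-warning.fixture.js'], 'pipe')[1],
    'when fulfilled',
    'to contain output',
    /warning/i
  );
});
```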
--- .../fixtures/uncaught/listeners.fixture.js | 3 ++- test/integration/helpers.js | 26 ++++++++++++++----- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/test/integration/fixtures/uncaught/listeners.fixture.js b/test/integration/fixtures/uncaught/listeners.fixture.js index 3ad398cfe0..e8dcb1b944 100644 --- a/test/integration/fixtures/uncaught/listeners.fixture.js +++ b/test/integration/fixtures/uncaught/listeners.fixture.js @@ -3,7 +3,8 @@ const assert = require('assert'); const mocha = require("../../../../lib/mocha"); -for (let i = 0; i < 15; i++) { +// keep this under 10 to avoid warning +for (let i = 0; i < 9; i++) { const r = new mocha.Runner(new mocha.Suite("" + i, undefined)); r.run(); } diff --git a/test/integration/helpers.js b/test/integration/helpers.js index f8c99b55a3..dfc35326e8 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -173,7 +173,7 @@ function runMochaJSON(fixturePath, args, fn, opts) { * * If you need more granular control, try {@link invokeMochaAsync} instead. * - * @param {string} fixturePath - Path to (or name of, or basename of) fixture `.js` file + * @param {string} fixturePath - Path to (or name of, or basename of) fixture file * @param {Options} [args] - Command-line arguments to the `mocha` executable * @param {Object} [opts] - Options for `child_process.spawn`. * @returns {Promise} @@ -194,6 +194,12 @@ function runMochaAsync(fixturePath, args, opts) { }); } +/** + * Like {@link runMochaJSON}, but returns a `Promise`. + * @param {string} fixturePath - Path to (or name of, or basename of) fixture file + * @param {Options} [args] - Command-line args + * @param {Object} [opts] - Options for `child_process.spawn` + */ function runMochaJSONAsync(fixturePath, args, opts) { return new Promise(function(resolve, reject) { runMochaJSON( @@ -218,9 +224,13 @@ function runMochaJSONAsync(fixturePath, args, opts) { */ function toJSONRunResult(result) { var code = result.code; - result = JSON.parse(result.output); - result.code = code; - return result; + try { + result = JSON.parse(result.output); + result.code = code; + return result; + } catch (err) { + throw new Error(err.message); + } } /** @@ -333,16 +343,20 @@ function invokeSubMocha(args, fn, opts) { */ function _spawnMochaWithListeners(args, fn, opts) { var output = ''; + opts = opts || {}; if (opts === 'pipe') { opts = {stdio: 'pipe'}; } opts = Object.assign( { cwd: process.cwd(), - stdio: ['ignore', 'pipe', 'ignore'] + stdio: ['ignore', 'pipe', 'inherit'] }, - opts || {} + opts ); + // prevent DEBUG from borking STDERR when piping. + // delete opts.env.DEBUG; + debug('spawning: %s', [process.execPath].concat(args).join(' ')); var mocha = spawn(process.execPath, args, opts); var listener = function(data) { From 9ffe03f33677e3373936d383bfb28c7db38aa3c5 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 16:04:40 -0700 Subject: [PATCH 51/92] implement an alternative way to define root hooks; works w/ `--parallel` When a module loaded by `--require` exports an object property `mochaHooks`, it can contain four properties corresponding to the four different types of hooks. Each prop can be a function or array of functions. 
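For instance, here's a sketch of the array form (a single-function example follows below):

```
// hooks.js -- each property may also be an array of hook functions
exports.mochaHooks = {
  beforeEach: [
    function() {
      console.log('first beforeEach');
    },
    function() {
      console.log('second beforeEach');
    }
  ]
};
```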
Here's an example using just functions: ``` // hooks.js exports.mochaHooks = { beforeAll() { console.log('beforeAll'); }, beforeEach() { console.log('beforeEach'); }, afterAll() { console.log('afterAll'); }, afterEach() { console.log('afterEach'); } }; ``` Use: ``` mocha --require hooks.js test/**/*.spec.js ``` `--file` does not work as expected before using `--parallel`, because order is non-deterministic. The `hooks.js` file (and anything else `--require`'d) will be loaded _once per child process_. --- lib/cli/run.js | 2 +- lib/mocha.js | 42 ++++++++++++++++++- lib/worker.js | 4 +- .../require/root-hook-defs-a.fixture.js | 16 +++++++ .../require/root-hook-defs-b.fixture.js | 36 ++++++++++++++++ .../options/require/root-hook-test.fixture.js | 6 +++ test/integration/options/require.spec.js | 23 ++++++++++ 7 files changed, 125 insertions(+), 4 deletions(-) create mode 100644 test/integration/fixtures/options/require/root-hook-defs-a.fixture.js create mode 100644 test/integration/fixtures/options/require/root-hook-defs-b.fixture.js create mode 100644 test/integration/fixtures/options/require/root-hook-test.fixture.js create mode 100644 test/integration/options/require.spec.js diff --git a/lib/cli/run.js b/lib/cli/run.js index 9bcf1bd533..c49da63762 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -328,7 +328,7 @@ exports.builder = yargs => } // load requires first, because it can impact "plugin" validation - handleRequires(argv.require); + argv.rootHooks = handleRequires(argv.require); validatePlugin(argv, 'reporter', Mocha.reporters); validatePlugin(argv, 'ui', Mocha.interfaces); diff --git a/lib/mocha.js b/lib/mocha.js index 2856a82071..af5e96940f 100644 --- a/lib/mocha.js +++ b/lib/mocha.js @@ -92,6 +92,8 @@ exports.Test = require('./test'); * @param {string} [options.ui] - Interface name. * @param {boolean} [options.parallel] - Run jobs in parallel * @param {number} [options.jobs] - Max number of worker processes for parallel runs + * @param {MochaRootHooks} [options.rootHooks] - Hooks to bootstrap the root + * suite with */ function Mocha(options) { options = utils.assign({}, mocharc, options || {}); @@ -132,7 +134,8 @@ function Mocha(options) { 'fullTrace', 'growl', 'inlineDiffs', - 'invert' + 'invert', + 'rootHooks' ].forEach(function(opt) { if (options[opt]) { this[opt](); @@ -876,3 +879,40 @@ Mocha.prototype.run = function(fn) { return runner.run(done, {files: this.files, options: options}); }; + +/** + * Assigns hooks to the root suite + * @param {MochaRootHooks} [hooks] - Hooks to assign to root suite + * @chainable + */ +Mocha.prototype.rootHooks = function rootHooks(hooks) { + if (utils.type(hooks) === 'object') { + var beforeAll = [].concat(hooks.beforeAll || []); + var beforeEach = [].concat(hooks.beforeEach || []); + var afterAll = [].concat(hooks.afterAll || []); + var afterEach = [].concat(hooks.afterEach || []); + var rootSuite = this.suite; + beforeAll.forEach(function(hook) { + rootSuite.beforeAll(hook); + }); + beforeEach.forEach(function(hook) { + rootSuite.beforeEach(hook); + }); + afterAll.forEach(function(hook) { + rootSuite.afterAll(hook); + }); + afterEach.forEach(function(hook) { + rootSuite.afterEach(hook); + }); + } + return this; +}; + +/** + * An alternative way to define root hooks that works with parallel runs. 
+ * @typedef {Object} MochaRootHooks + * @property {Function|Function[]} [beforeAll] - "Before all" hook(s) + * @property {Function|Function[]} [beforeEach] - "Before each" hook(s) + * @property {Function|Function[]} [afterAll] - "After all" hook(s) + * @property {Function|Function[]} [afterEach] - "After each" hook(s) + */ diff --git a/lib/worker.js b/lib/worker.js index 354ccb0594..c4f7b53532 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -22,13 +22,13 @@ if (workerpool.isMainThread) { * Handles `--require` and `--ui`. Does _not_ handle `--reporter`, * as only the `Buffered` reporter is used. * - * **This function only runs once**; it overwrites itself with a no-op + * **This function only runs once per worker**; it overwrites itself with a no-op * before returning. * * @param {Options} argv - Command-line options */ let bootstrap = argv => { - handleRequires(argv.require); + argv.rootHooks = handleRequires(argv.require); validatePlugin(argv, 'ui', Mocha.interfaces); process.on('beforeExit', () => { /* istanbul ignore next */ diff --git a/test/integration/fixtures/options/require/root-hook-defs-a.fixture.js b/test/integration/fixtures/options/require/root-hook-defs-a.fixture.js new file mode 100644 index 0000000000..8938816eee --- /dev/null +++ b/test/integration/fixtures/options/require/root-hook-defs-a.fixture.js @@ -0,0 +1,16 @@ +'use strict'; + +exports.mochaHooks = { + beforeAll() { + console.log('beforeAll'); + }, + beforeEach() { + console.log('beforeEach'); + }, + afterAll() { + console.log('afterAll'); + }, + afterEach() { + console.log('afterEach'); + } +}; diff --git a/test/integration/fixtures/options/require/root-hook-defs-b.fixture.js b/test/integration/fixtures/options/require/root-hook-defs-b.fixture.js new file mode 100644 index 0000000000..6aa0ed3342 --- /dev/null +++ b/test/integration/fixtures/options/require/root-hook-defs-b.fixture.js @@ -0,0 +1,36 @@ +'use strict'; + +exports.mochaHooks = { + beforeAll: [ + function() { + console.log('beforeAll array 1'); + }, + function() { + console.log('beforeAll array 2'); + } + ], + beforeEach: [ + function() { + console.log('beforeEach array 1'); + }, + function() { + console.log('beforeEach array 2'); + } + ], + afterAll: [ + function() { + console.log('afterAll array 1'); + }, + function() { + console.log('afterAll array 2'); + } + ], + afterEach: [ + function() { + console.log('afterEach array 1'); + }, + function() { + console.log('afterEach array 2'); + } + ] +}; diff --git a/test/integration/fixtures/options/require/root-hook-test.fixture.js b/test/integration/fixtures/options/require/root-hook-test.fixture.js new file mode 100644 index 0000000000..412895c87c --- /dev/null +++ b/test/integration/fixtures/options/require/root-hook-test.fixture.js @@ -0,0 +1,6 @@ +// run with --require root-hook-defs-a.fixture.js --require +// root-hook-defs-b.fixture.js + +it('should have some root hooks', function() { + // test +}); \ No newline at end of file diff --git a/test/integration/options/require.spec.js b/test/integration/options/require.spec.js new file mode 100644 index 0000000000..a963b81e98 --- /dev/null +++ b/test/integration/options/require.spec.js @@ -0,0 +1,23 @@ +'use strict'; + +var invokeMochaAsync = require('../helpers').invokeMochaAsync; + +describe('--require', function() { + it('should allow registration of root hooks', function() { + return expect( + invokeMochaAsync([ + '--require', + require.resolve( + '../fixtures/options/require/root-hook-defs-a.fixture.js' + ), + '--require', + require.resolve( + 
'../fixtures/options/require/root-hook-defs-b.fixture.js' + ), + require.resolve('../fixtures/options/require/root-hook-test.fixture.js') + ])[1], + 'when fulfilled', + 'to have succeeded' + ); + }); +}); From 926a934d8262b830a4fd72ac29bb1b248c2eee65 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 16:05:07 -0700 Subject: [PATCH 52/92] remove warning about --parallel run with a single test file this gets annoying too quickly --- lib/buffered-runner.js | 7 ------- 1 file changed, 7 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index cc8d9cfc6f..79d9f871e2 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -6,7 +6,6 @@ const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; const debug = require('debug')('mocha:parallel:buffered-runner'); const workerpool = require('workerpool'); const {deserialize} = require('./serializer'); -const {warn} = require('./utils'); const WORKER_PATH = require.resolve('./worker.js'); const {setInterval, clearInterval} = global; const debugStats = pool => { @@ -61,12 +60,6 @@ class BufferedRunner extends Runner { let pool; try { - if (files.length === 1) { - // using --parallel on a single file is slower than it needs to be. - warn( - `(Mocha) Only one (1) test file detected (${files[0]}); consider omitting --parallel` - ); - } const jobs = options.jobs || os.cpus().length - 1; debug('starting pool with %d max workers', jobs); pool = workerpool.pool(WORKER_PATH, { From 7210ca07b166ffc7080ff3d4f851ab56c79cd9a3 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 16:15:28 -0700 Subject: [PATCH 53/92] rootHooks fix --- lib/worker.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/worker.js b/lib/worker.js index c4f7b53532..e2a09dea4a 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -10,6 +10,8 @@ const {setInterval, clearInterval} = global; const BUFFERED_REPORTER_PATH = require.resolve('./reporters/buffered'); +let rootHooks; + if (workerpool.isMainThread) { throw new Error( 'This script is intended to be run as a worker (by the `workerpool` package).' @@ -28,7 +30,7 @@ if (workerpool.isMainThread) { * @param {Options} argv - Command-line options */ let bootstrap = argv => { - argv.rootHooks = handleRequires(argv.require); + rootHooks = handleRequires(argv.require); validatePlugin(argv, 'ui', Mocha.interfaces); process.on('beforeExit', () => { /* istanbul ignore next */ @@ -60,7 +62,8 @@ async function run(filepath, argv = {ui: 'bdd'}) { // workers only use the `Buffered` reporter. reporter: BUFFERED_REPORTER_PATH, // if this was true, it would cause infinite recursion. - parallel: false + parallel: false, + rootHooks }); bootstrap(opts); From c79c359ec51955c45f0a18db09a87c46220a8f1b Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 16:17:09 -0700 Subject: [PATCH 54/92] another rootHooks fix --- lib/worker.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/worker.js b/lib/worker.js index e2a09dea4a..f0467e558d 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -62,12 +62,13 @@ async function run(filepath, argv = {ui: 'bdd'}) { // workers only use the `Buffered` reporter. reporter: BUFFERED_REPORTER_PATH, // if this was true, it would cause infinite recursion. 
- parallel: false, - rootHooks + parallel: false }); bootstrap(opts); + opts.rootHooks = opts.rootHooks || rootHooks; + const mocha = new Mocha(opts).addFile(filepath); try { From fb6300b8a9d63289af73ea69c30efdce93178119 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 16:50:29 -0700 Subject: [PATCH 55/92] test actually checks rootHooks behavior --- lib/mocha.js | 7 ++- lib/worker.js | 8 +-- test/integration/options/require.spec.js | 69 ++++++++++++++++++------ 3 files changed, 63 insertions(+), 21 deletions(-) diff --git a/lib/mocha.js b/lib/mocha.js index af5e96940f..ddb6856c8e 100644 --- a/lib/mocha.js +++ b/lib/mocha.js @@ -134,14 +134,17 @@ function Mocha(options) { 'fullTrace', 'growl', 'inlineDiffs', - 'invert', - 'rootHooks' + 'invert' ].forEach(function(opt) { if (options[opt]) { this[opt](); } }, this); + if (options.rootHooks) { + this.rootHooks(options.rootHooks); + } + if (options.parallel) { this._runner = require('./buffered-runner'); this.lazyLoadFiles = true; diff --git a/lib/worker.js b/lib/worker.js index f0467e558d..f6790a3c80 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -36,7 +36,6 @@ let bootstrap = argv => { /* istanbul ignore next */ debug('exiting'); }); - debug('bootstrapped'); bootstrap = () => {}; }; @@ -67,7 +66,7 @@ async function run(filepath, argv = {ui: 'bdd'}) { bootstrap(opts); - opts.rootHooks = opts.rootHooks || rootHooks; + opts.rootHooks = rootHooks; const mocha = new Mocha(opts).addFile(filepath); @@ -86,7 +85,10 @@ async function run(filepath, argv = {ui: 'bdd'}) { // Runner adds these; if we don't remove them, we'll get a leak. process.removeAllListeners('uncaughtException'); - debug('completed run with %d test failures', result.failures); + debug( + 'completed run with %d test failures', + typeof result.failures === 'number' ? 
result.failures : 0 + ); try { const serialized = serialize(result); debug('returning to main process'); diff --git a/test/integration/options/require.spec.js b/test/integration/options/require.spec.js index a963b81e98..726732cd5b 100644 --- a/test/integration/options/require.spec.js +++ b/test/integration/options/require.spec.js @@ -3,21 +3,58 @@ var invokeMochaAsync = require('../helpers').invokeMochaAsync; describe('--require', function() { - it('should allow registration of root hooks', function() { - return expect( - invokeMochaAsync([ - '--require', - require.resolve( - '../fixtures/options/require/root-hook-defs-a.fixture.js' - ), - '--require', - require.resolve( - '../fixtures/options/require/root-hook-defs-b.fixture.js' - ), - require.resolve('../fixtures/options/require/root-hook-test.fixture.js') - ])[1], - 'when fulfilled', - 'to have succeeded' - ); + describe('when run in serial', function() { + it('should allow registration of root hooks', function() { + return expect( + invokeMochaAsync([ + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-a.fixture.js' + ), + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-b.fixture.js' + ), + require.resolve( + '../fixtures/options/require/root-hook-test.fixture.js' + ) + ])[1], + 'when fulfilled', + 'to contain output', + /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\n/ + ).and( + 'when fulfilled', + 'to contain output', + /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ + ); + }); + }); + + describe('when run with --parallel', function() { + it('should allow registration of root hooks', function() { + return expect( + invokeMochaAsync([ + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-a.fixture.js' + ), + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-b.fixture.js' + ), + '--parallel', + require.resolve( + '../fixtures/options/require/root-hook-test.fixture.js' + ) + ])[1], + 'when fulfilled', + 'to contain output', + /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\n/ + ).and( + 'when fulfilled', + 'to contain output', + /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ + ); + }); }); }); From 41dcbc9c53c1ad4ff27b21977394f70069bf060d Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 17:20:06 -0700 Subject: [PATCH 56/92] try to force color in CI Signed-off-by: Christopher Hiller --- package-scripts.js | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/package-scripts.js b/package-scripts.js index fe4395aed4..1a79fd05c1 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -12,8 +12,11 @@ const path = require('path'); function test(testName, mochaParams) { const coverageCommand = `nyc --no-clean --report-dir coverage/reports/${testName}`; const mochaCommand = `node ${path.join('bin', 'mocha')}`; // Include 'node' and path.join for Windows compatibility - if (process.env.CI && !/^only-/.test(testName)) { - mochaParams += ' --forbid-only'; + if (process.env.CI) { + if (!/^only-/.test(testName)) { + mochaParams += ' --forbid-only'; + } + mochaParams += ' --color'; } return `${ process.env.COVERAGE ? 
coverageCommand : '' From 0415bb7b5537353967258c810d833de260a6d0a0 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 17:29:58 -0700 Subject: [PATCH 57/92] json-stream reporter is incompatible with --parallel this could be fixed (but probably not without breaking changes), but again, `runner.total` is the issue. --- lib/cli/run.js | 6 +++ test/integration/options/parallel.spec.js | 53 ++++++++--------------- 2 files changed, 24 insertions(+), 35 deletions(-) diff --git a/lib/cli/run.js b/lib/cli/run.js index c49da63762..703c887950 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -327,6 +327,12 @@ exports.builder = yargs => ); } + if (argv.reporter === 'json-stream' && argv.parallel) { + throw createUnsupportedError( + '--reporter=json-stream is mutually exclusive with --parallel' + ); + } + // load requires first, because it can impact "plugin" validation argv.rootHooks = handleRequires(argv.require); validatePlugin(argv, 'reporter', Mocha.reporters); diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 92bdbc2329..869f643483 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -205,8 +205,8 @@ describe('--parallel', function() { describe('reporter equivalence', function() { // each reporter name is duplicated; one is in all lower-case - // 'base' is abstract, 'html' is browser-only, and 'progress' & `markdown` are incompatible - var DENY = ['progress', 'base', 'html', 'markdown']; + // 'base' is abstract, 'html' is browser-only, others are incompatible + var DENY = ['progress', 'base', 'html', 'markdown', 'json-stream']; Object.keys(Mocha.reporters) .filter(function(name) { return /^[a-z]/.test(name) && DENY.indexOf(name) === -1; @@ -285,12 +285,6 @@ describe('--parallel', function() { }); }); - describe('when a single test file is run with --reporter=markdown', function() { - it('should have the same output as when run with --no-parallel', function() { - return runGenericReporterTest.call(this, 'markdown'); - }); - }); - describe('when a single test file is run with --reporter=landing', function() { it('should have the same output as when run with --no-parallel', function() { return runGenericReporterTest.call(this, 'landing'); @@ -331,38 +325,27 @@ describe('--parallel', function() { }); }); - describe('when a single test file is run with --reporter=json', function() { - it('should have the same output as when run with --no-parallel', function() { - // this one has some timings/durations that we can safely ignore - return compareReporters.call(this, 'json').then(function(result) { - var expected = result.shift(); - expected.output = JSON.parse(expected.output); - var actual = result.shift(); - actual.output = JSON.parse(actual.output); - return expect(actual, 'to satisfy', { - passing: expected.passing, - failing: expected.failing, - pending: expected.pending, - code: expected.code, - output: { - stats: { - suites: expected.output.stats.suites, - tests: expected.output.stats.tests, - passes: expected.output.stats.passes, - pending: expected.output.stats.pending, - failures: expected.output.stats.failures - }, - tests: expected.tests - } - }); - }); + describe('when a single test file is run with --reporter=json-stream', function() { + it('should fail due to incompatibility', function() { + return expect( + invokeMochaAsync( + [ + require.resolve('../fixtures/options/parallel/test-a.fixture.js'), + '--reporter=json-stream', + '--parallel' + ], + 'pipe' + )[1], 
+ 'when fulfilled', + 'to have failed' + ).and('when fulfilled', 'to contain output', /mutually exclusive/); }); }); - describe('when a single test file is run with --reporter=json-stream', function() { + describe('when a single test file is run with --reporter=json', function() { it('should have the same output as when run with --no-parallel', function() { // this one has some timings/durations that we can safely ignore - return compareReporters.call(this, 'json-stream').then(function(result) { + return compareReporters.call(this, 'json').then(function(result) { var expected = result.shift(); expected.output = JSON.parse(expected.output); var actual = result.shift(); From 8d1a173b8e777609cd03b481f3581070317e9e60 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 8 Apr 2020 17:35:54 -0700 Subject: [PATCH 58/92] update lockfile after rebase --- package-lock.json | 99 ++++++++--------------------------------------- 1 file changed, 17 insertions(+), 82 deletions(-) diff --git a/package-lock.json b/package-lock.json index ae095c5af5..068aa79749 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1039,17 +1039,6 @@ "es-abstract": "^1.17.0-next.1" } }, - "array.prototype.map": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.2.tgz", - "integrity": "sha512-Az3OYxgsa1g7xDYp86l0nnN4bcmuEITGe1rbdEBVkrqkzMgDcbdQ2R7r41pNzti+4NMces3H8gMmuioZUilLgw==", - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "es-array-method-boxes-properly": "^1.0.0", - "is-string": "^1.0.4" - } - }, "arraybuffer.slice": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", @@ -5478,6 +5467,7 @@ "version": "1.17.5", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", + "dev": true, "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", @@ -5492,36 +5482,11 @@ "string.prototype.trimright": "^2.1.1" } }, - "es-array-method-boxes-properly": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", - "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==" - }, - "es-get-iterator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", - "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", - "requires": { - "es-abstract": "^1.17.4", - "has-symbols": "^1.0.1", - "is-arguments": "^1.0.4", - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-string": "^1.0.5", - "isarray": "^2.0.5" - }, - "dependencies": { - "isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" - } - } - }, "es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, "requires": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -7350,6 +7315,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": 
"sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, "requires": { "function-bind": "^1.1.1" } @@ -8290,11 +8256,6 @@ "is-decimal": "^1.0.0" } }, - "is-arguments": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", - "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==" - }, "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", @@ -8317,7 +8278,8 @@ "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", - "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==" + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", + "dev": true }, "is-ci": { "version": "1.2.1", @@ -8371,7 +8333,8 @@ "is-date-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "dev": true }, "is-decimal": { "version": "1.0.4", @@ -8474,11 +8437,6 @@ "integrity": "sha1-gV0USxTVac7L6tTVaTCX8Aqb9sU=", "dev": true }, - "is-map": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", - "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==" - }, "is-natural-number": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", @@ -8589,6 +8547,7 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", + "dev": true, "requires": { "has": "^1.0.3" } @@ -8620,11 +8579,6 @@ "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", "dev": true }, - "is-set": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", - "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==" - }, "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", @@ -8634,7 +8588,8 @@ "is-string": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==" + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true }, "is-svg": { "version": "3.0.0", @@ -8649,6 +8604,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, "requires": { "has-symbols": "^1.0.1" } @@ -8882,20 +8838,6 @@ "is-object": "^1.0.1" } }, - "iterate-iterator": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/iterate-iterator/-/iterate-iterator-1.0.1.tgz", - "integrity": 
"sha512-3Q6tudGN05kbkDQDI4CqjaBf4qf85w6W6GnuZDtUVYwKgtC1q8yxYX7CZed7N+tLzQqS6roujWvszf13T+n9aw==" - }, - "iterate-value": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/iterate-value/-/iterate-value-1.0.2.tgz", - "integrity": "sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==", - "requires": { - "es-get-iterator": "^1.0.2", - "iterate-iterator": "^1.0.1" - } - }, "javascript-stringify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.0.1.tgz", @@ -12020,7 +11962,8 @@ "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==" + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", + "dev": true }, "object-keys": { "version": "1.1.1", @@ -13763,18 +13706,6 @@ "asap": "~2.0.3" } }, - "promise.allsettled": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/promise.allsettled/-/promise.allsettled-1.0.2.tgz", - "integrity": "sha512-UpcYW5S1RaNKT6pd+s9jp9K9rlQge1UXKskec0j6Mmuq7UJCvlS2J2/s/yuPN8ehftf9HXMxWlKiPbGGUzpoRg==", - "requires": { - "array.prototype.map": "^1.0.1", - "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1", - "function-bind": "^1.1.1", - "iterate-value": "^1.0.0" - } - }, "proto-list": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", @@ -16149,6 +16080,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.0.tgz", "integrity": "sha512-EEJnGqa/xNfIg05SxiPSqRS7S9qwDhYts1TSLR1BQfYUfPe1stofgGKvwERK9+9yf+PpfBMlpBaCHucXGPQfUA==", + "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" @@ -16158,6 +16090,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", + "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16168,6 +16101,7 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", + "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16178,6 +16112,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.0.tgz", "integrity": "sha512-iCP8g01NFYiiBOnwG1Xc3WZLyoo+RuBymwIlWncShXDDJYWN6DbnM3odslBJdgCdRlq94B5s63NWAZlcn2CS4w==", + "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" From 7f3582599fcc420bc6910075e4b9797343e557ca Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 10 Apr 2020 16:26:25 -0700 Subject: [PATCH 59/92] better failure modes - add `SIGINT` handling for clean worker death - don't sent `SIGTERM` in parallel mode; otherwise it can leave processes hanging around - make `--jobs=0` or `--jobs=1` cause Mocha to run in serial even if `--parallel` is true - reorganize some yargs checks - handle uncaught exceptions from worker processes, recovering where possible - handle `--allow-uncaught` to the best of our 
ability - ensure `uncaught.spec.js` doesn't flirt with max listeners - make `exit.spec.js` handle `SIGINT` and send `SIGTERM` as fixture should be run in serial, but also with `_mocha` (so there is no global `SIGINT` handler; unsure how the hell this worked before); renamed a misnamed test - skip ESM tests for `--parallel` in unsupported environments - fix `DEBUG` handling in integration tests - fix `bail`-related test for `--parallel` - adds module `promise.allsettled` as a polyfill, which allows us to recover from exceptions in worker processes - upgrades `unexpected-eventemitter` to v2.1.0, which allows for making event assertions in async functions --- bin/mocha | 4 +- lib/buffered-runner.js | 109 +++++++++++++----- lib/cli/run.js | 45 ++++---- package-lock.json | 105 +++++++++++++---- package.json | 3 +- .../options/jobs/fail-in-parallel.fixture.js | 7 ++ .../options/parallel/syntax-err.fixture.js | 1 + .../fixtures/uncaught/listeners.fixture.js | 4 +- test/integration/helpers.js | 5 +- test/integration/options/exit.spec.js | 31 +++-- test/integration/options/jobs.spec.js | 37 ++++-- test/integration/options/parallel.spec.js | 36 ++++++ test/node-unit/buffered-runner.spec.js | 14 +-- 13 files changed, 293 insertions(+), 108 deletions(-) create mode 100644 test/integration/fixtures/options/jobs/fail-in-parallel.fixture.js create mode 100644 test/integration/fixtures/options/parallel/syntax-err.fixture.js diff --git a/bin/mocha b/bin/mocha index 9c63463790..ff8e6c8c46 100755 --- a/bin/mocha +++ b/bin/mocha @@ -127,7 +127,9 @@ if (Object.keys(nodeArgs).length) { // terminate children. process.on('SIGINT', () => { proc.kill('SIGINT'); // calls runner.abort() - proc.kill('SIGTERM'); // if that didn't work, we're probably in an infinite loop, so make it die. + if (!args.parallel || args.jobs < 2) { + proc.kill('SIGTERM'); // if that didn't work, we're probably in an infinite loop, so make it die. + } }); } else { require('../lib/cli/cli').main(unparse(mochaArgs, {alias: aliases})); diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 79d9f871e2..55ca246d5b 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -1,5 +1,6 @@ 'use strict'; +const allSettled = require('promise.allsettled'); const os = require('os'); const Runner = require('./runner'); const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; @@ -19,6 +20,11 @@ const debugStats = pool => { ); }; +/** + * The interval at which we will display stats for worker processes in debug mode + */ +const DEBUG_STATS_INTERVAL = 5000; + /** * This `Runner` delegates tests runs to worker threads. Does not execute any * {@link Runnable}s by itself! @@ -45,9 +51,14 @@ class BufferedRunner extends Runner { * command-line options, respectively. */ run(callback, {files, options} = {}) { + /** + * Listener on `Process.SIGINT` which tries to cleanly terminate the worker pool. + */ + let sigIntListener; // This function should _not_ return a `Promise`; its parent (`Runner#run`) // returns this instance, so this should do the same. However, we want to make // use of `async`/`await`, so we use this IIFE. 
+ (async () => { /** * This is an interval that outputs stats about the worker pool every so often @@ -61,13 +72,27 @@ class BufferedRunner extends Runner { try { const jobs = options.jobs || os.cpus().length - 1; - debug('starting pool with %d max workers', jobs); + debug('run(): starting pool with %d max workers', jobs); pool = workerpool.pool(WORKER_PATH, { workerType: 'process', maxWorkers: jobs }); - let totalFailureCount = 0; + sigIntListener = async () => { + if (!didAbort) { + didAbort = true; + try { + debug('shutting down %d (max) workers', jobs); + await pool.terminate(true); + } catch (err) { + console.error(err); + } finally { + process.exit(128); + } + } + }; + + process.on('SIGINT', sigIntListener); /** * This flag is used by all workers in the pool; it tells them that we @@ -77,27 +102,32 @@ class BufferedRunner extends Runner { */ let didAbort = false; - this.emit(EVENT_RUN_BEGIN); - // the "pool proxy" object is essentially just syntactic sugar to call a // worker's procedure as one would a regular function. const poolProxy = await pool.proxy(); - debugInterval = setInterval(() => debugStats(pool), 5000).unref(); + debugInterval = setInterval( + () => debugStats(pool), + DEBUG_STATS_INTERVAL + ).unref(); - await Promise.all( + // this is set for uncaught exception handling in `Runner#uncaught` + this.started = true; + this.emit(EVENT_RUN_BEGIN); + + const results = await allSettled( files.map(async file => { - debug('enqueueing test file %s', file); + debug('run(): enqueueing test file %s', file); try { const result = await poolProxy.run(file, options); const {failureCount, events} = deserialize(result); debug( - 'completed run of file %s; %d failures / %d events', + 'run(): completed run of file %s; %d failures / %d events', file, failureCount, events.length ); - totalFailureCount += failureCount; // can this ever be non-numeric? + this.failures += failureCount; // can this ever be non-numeric? /** * If we set this, then we encountered a "bail" flag, and will * terminate the pool once all events have been emitted. 
@@ -107,14 +137,11 @@ class BufferedRunner extends Runner { while (event) { this.emit(event.eventName, event.data, event.error); if ( - (totalFailureCount || event.error) && + (failureCount || event.error) && event.data && event.data._bail ) { - debug( - 'nonzero failure count & found bail flag in event: %O', - event - ); + debug('run(): nonzero failure count & found bail flag'); // we need to let the events complete for this file, as the worker // should run any cleanup hooks shouldAbort = true; @@ -123,41 +150,63 @@ class BufferedRunner extends Runner { } if (shouldAbort) { didAbort = true; - debug('terminating pool due to "bail" flag'); + debug('run(): terminating pool due to "bail" flag'); await pool.terminate(true); } - debug('all events reported for file %s', file); } catch (err) { - if (!didAbort) { - debug('terminating pool due to uncaught exception'); - didAbort = true; - await pool.terminate(true); - this.uncaught(err); + if (didAbort) { + debug( + 'run(): worker pool terminated with intent; skipping file %s', + file + ); } else { - debug('worker pool terminated; skipping file %s', file); + // this is an uncaught exception + if (this.allowUncaught) { + debug( + 'run(): found uncaught exception with --allow-uncaught' + ); + // still have to clean up + didAbort = true; + await pool.terminate(true); + } else { + debug('run(): found uncaught exception: %O', err); + } + throw err; } } finally { - debug('done running file %s', file); + debug('run(): done running file %s', file); } }) ); - // note that if we aborted due to "bail", this will have happened - // already + // note that pool may already be terminated due to --bail await pool.terminate(); + results + .filter(({status}) => status === 'rejected') + .forEach(({reason}) => { + if (this.allowUncaught) { + throw reason; + } + // "rejected" will correspond to uncaught exceptions. + // unlike the serial runner, the parallel runner can always recover. + this.uncaught(reason); + }); + this.emit(EVENT_RUN_END); - debug('completing with failure count %d', totalFailureCount); - callback(totalFailureCount); + debug('run(): completing with failure count %d', this.failures); + callback(this.failures); } catch (err) { - // this is an "unknown" error; probably from 3p code - /* istanbul ignore next */ + // this is probably from an uncaught exception and this.allowUncaught. + // Promise would trap this otherwise process.nextTick(() => { + debug('run(): throwing uncaught exception'); + process.exitCode = process.exitCode || this.failures || 1; throw err; }); } finally { clearInterval(debugInterval); - debugStats(pool); + process.removeListener('SIGINT', sigIntListener); } })(); return this; diff --git a/lib/cli/run.js b/lib/cli/run.js index 703c887950..e6baaae679 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -152,13 +152,12 @@ exports.builder = yargs => group: GROUPS.FILTERS }, jobs: { - description: 'Number of concurrent jobs', - implies: 'parallel', - defaultDescription: `CPU core count (${coreCount}) -`, + description: + 'Number of concurrent jobs for --parallel; use 1 to run in serial', + defaultDescription: '(number of CPU cores - 1)', requiresArg: true, group: GROUPS.RULES, - coerce: value => - Number(typeof value === 'undefined' ? 
coreCount - 1 : value) + default: Math.min(2, coreCount - 1) }, 'list-interfaces': { conflicts: Array.from(ONE_AND_DONE_ARGS), @@ -299,6 +298,24 @@ exports.builder = yargs => '--parallel runs test files in a non-deterministic order, and is mutually exclusive with --sort' ); } + + if (argv.reporter === 'progress') { + throw createUnsupportedError( + '--reporter=progress is mutually exclusive with --parallel' + ); + } + + if (argv.reporter === 'markdown') { + throw createUnsupportedError( + '--reporter=markdown is mutually exclusive with --parallel' + ); + } + + if (argv.reporter === 'json-stream') { + throw createUnsupportedError( + '--reporter=json-stream is mutually exclusive with --parallel' + ); + } } if (argv.compilers) { @@ -315,24 +332,6 @@ exports.builder = yargs => ); } - if (argv.reporter === 'progress' && argv.parallel) { - throw createUnsupportedError( - '--reporter=progress is mutually exclusive with --parallel' - ); - } - - if (argv.reporter === 'markdown' && argv.parallel) { - throw createUnsupportedError( - '--reporter=markdown is mutually exclusive with --parallel' - ); - } - - if (argv.reporter === 'json-stream' && argv.parallel) { - throw createUnsupportedError( - '--reporter=json-stream is mutually exclusive with --parallel' - ); - } - // load requires first, because it can impact "plugin" validation argv.rootHooks = handleRequires(argv.require); validatePlugin(argv, 'reporter', Mocha.reporters); diff --git a/package-lock.json b/package-lock.json index 068aa79749..aa583fda7e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1039,6 +1039,17 @@ "es-abstract": "^1.17.0-next.1" } }, + "array.prototype.map": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/array.prototype.map/-/array.prototype.map-1.0.2.tgz", + "integrity": "sha512-Az3OYxgsa1g7xDYp86l0nnN4bcmuEITGe1rbdEBVkrqkzMgDcbdQ2R7r41pNzti+4NMces3H8gMmuioZUilLgw==", + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "es-array-method-boxes-properly": "^1.0.0", + "is-string": "^1.0.4" + } + }, "arraybuffer.slice": { "version": "0.0.7", "resolved": "https://registry.npmjs.org/arraybuffer.slice/-/arraybuffer.slice-0.0.7.tgz", @@ -5467,7 +5478,6 @@ "version": "1.17.5", "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", - "dev": true, "requires": { "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", @@ -5482,11 +5492,36 @@ "string.prototype.trimright": "^2.1.1" } }, + "es-array-method-boxes-properly": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz", + "integrity": "sha512-wd6JXUmyHmt8T5a2xreUwKcGPq6f1f+WwIJkijUqiGcJz1qqnZgP6XIK+QyIWU5lT7imeNxUll48bziG+TSYcA==" + }, + "es-get-iterator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", + "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", + "requires": { + "es-abstract": "^1.17.4", + "has-symbols": "^1.0.1", + "is-arguments": "^1.0.4", + "is-map": "^2.0.1", + "is-set": "^2.0.1", + "is-string": "^1.0.5", + "isarray": "^2.0.5" + }, + "dependencies": { + "isarray": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", + "integrity": 
"sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" + } + } + }, "es-to-primitive": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "dev": true, "requires": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -7315,7 +7350,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "dev": true, "requires": { "function-bind": "^1.1.1" } @@ -8256,6 +8290,11 @@ "is-decimal": "^1.0.0" } }, + "is-arguments": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", + "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==" + }, "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", @@ -8278,8 +8317,7 @@ "is-callable": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.5.tgz", - "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==", - "dev": true + "integrity": "sha512-ESKv5sMCJB2jnHTWZ3O5itG+O128Hsus4K4Qh1h2/cgn2vbgnLSVqfV46AeJA9D5EeeLa9w81KUXMtn34zhX+Q==" }, "is-ci": { "version": "1.2.1", @@ -8333,8 +8371,7 @@ "is-date-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", - "dev": true + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" }, "is-decimal": { "version": "1.0.4", @@ -8437,6 +8474,11 @@ "integrity": "sha1-gV0USxTVac7L6tTVaTCX8Aqb9sU=", "dev": true }, + "is-map": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", + "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==" + }, "is-natural-number": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", @@ -8547,7 +8589,6 @@ "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.5.tgz", "integrity": "sha512-vlKW17SNq44owv5AQR3Cq0bQPEb8+kF3UKZ2fiZNOWtztYE5i0CzCZxFDwO58qAOWtxdBRVO/V5Qin1wjCqFYQ==", - "dev": true, "requires": { "has": "^1.0.3" } @@ -8579,6 +8620,11 @@ "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", "dev": true }, + "is-set": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", + "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==" + }, "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", @@ -8588,8 +8634,7 @@ "is-string": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", - "dev": true + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==" }, "is-svg": { "version": 
"3.0.0", @@ -8604,7 +8649,6 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", - "dev": true, "requires": { "has-symbols": "^1.0.1" } @@ -8838,6 +8882,20 @@ "is-object": "^1.0.1" } }, + "iterate-iterator": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/iterate-iterator/-/iterate-iterator-1.0.1.tgz", + "integrity": "sha512-3Q6tudGN05kbkDQDI4CqjaBf4qf85w6W6GnuZDtUVYwKgtC1q8yxYX7CZed7N+tLzQqS6roujWvszf13T+n9aw==" + }, + "iterate-value": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/iterate-value/-/iterate-value-1.0.2.tgz", + "integrity": "sha512-A6fMAio4D2ot2r/TYzr4yUWrmwNdsN5xL7+HUiyACE4DXm+q8HtPcnFTp+NnW3k4N05tZ7FVYFFb2CR13NxyHQ==", + "requires": { + "es-get-iterator": "^1.0.2", + "iterate-iterator": "^1.0.1" + } + }, "javascript-stringify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.0.1.tgz", @@ -11962,8 +12020,7 @@ "object-inspect": { "version": "1.7.0", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==", - "dev": true + "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==" }, "object-keys": { "version": "1.1.1", @@ -13706,6 +13763,18 @@ "asap": "~2.0.3" } }, + "promise.allsettled": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/promise.allsettled/-/promise.allsettled-1.0.2.tgz", + "integrity": "sha512-UpcYW5S1RaNKT6pd+s9jp9K9rlQge1UXKskec0j6Mmuq7UJCvlS2J2/s/yuPN8ehftf9HXMxWlKiPbGGUzpoRg==", + "requires": { + "array.prototype.map": "^1.0.1", + "define-properties": "^1.1.3", + "es-abstract": "^1.17.0-next.1", + "function-bind": "^1.1.1", + "iterate-value": "^1.0.0" + } + }, "proto-list": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/proto-list/-/proto-list-1.2.4.tgz", @@ -16080,7 +16149,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.0.tgz", "integrity": "sha512-EEJnGqa/xNfIg05SxiPSqRS7S9qwDhYts1TSLR1BQfYUfPe1stofgGKvwERK9+9yf+PpfBMlpBaCHucXGPQfUA==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" @@ -16090,7 +16158,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16101,7 +16168,6 @@ "version": "2.1.2", "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5", @@ -16112,7 +16178,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.0.tgz", "integrity": "sha512-iCP8g01NFYiiBOnwG1Xc3WZLyoo+RuBymwIlWncShXDDJYWN6DbnM3odslBJdgCdRlq94B5s63NWAZlcn2CS4w==", - "dev": true, "requires": { "define-properties": "^1.1.3", "es-abstract": "^1.17.5" @@ -17110,9 +17175,9 @@ "dev": true }, "unexpected-eventemitter": { 
- "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unexpected-eventemitter/-/unexpected-eventemitter-2.0.0.tgz", - "integrity": "sha512-/dQtSU7l5/1jzx+ZD2GKufh3J+rupNoIQC0Wk6bQz2qLwHKjfY8GaCXSdykaSpQHRNDr/2LgeFnK0In5cfQwPA==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unexpected-eventemitter/-/unexpected-eventemitter-2.1.0.tgz", + "integrity": "sha512-qsXYpdVJzXhTLbxOwO9t7d0jabm5Wj6mzKFcbdsXI49Q6FxX0ntpCI3fh5rvgcpRH85AxHSQQp1uePcdw5MVDg==", "dev": true }, "unexpected-sinon": { diff --git a/package.json b/package.json index e62fb599f1..25093d4028 100644 --- a/package.json +++ b/package.json @@ -58,6 +58,7 @@ "minimatch": "3.0.4", "ms": "2.1.2", "object.assign": "4.1.0", + "promise.allsettled": "^1.0.2", "strip-json-comments": "3.0.1", "supports-color": "7.1.0", "which": "2.0.2", @@ -122,7 +123,7 @@ "through2": "^3.0.1", "to-vfile": "^6.1.0", "unexpected": "^11.14.0", - "unexpected-eventemitter": "^2.0.0", + "unexpected-eventemitter": "^2.1.0", "unexpected-sinon": "^10.11.2", "uslug": "^1.0.4", "watchify": "^3.11.1" diff --git a/test/integration/fixtures/options/jobs/fail-in-parallel.fixture.js b/test/integration/fixtures/options/jobs/fail-in-parallel.fixture.js new file mode 100644 index 0000000000..4601ed330c --- /dev/null +++ b/test/integration/fixtures/options/jobs/fail-in-parallel.fixture.js @@ -0,0 +1,7 @@ +'use strict'; + +it('should fail if in a worker', function() { + if (/worker\.js$/.test(require.main.filename)) { + throw new Error('in worker!'); + } +}); \ No newline at end of file diff --git a/test/integration/fixtures/options/parallel/syntax-err.fixture.js b/test/integration/fixtures/options/parallel/syntax-err.fixture.js new file mode 100644 index 0000000000..8dd2b0f93b --- /dev/null +++ b/test/integration/fixtures/options/parallel/syntax-err.fixture.js @@ -0,0 +1 @@ +var foo = \ No newline at end of file diff --git a/test/integration/fixtures/uncaught/listeners.fixture.js b/test/integration/fixtures/uncaught/listeners.fixture.js index e8dcb1b944..69c4059294 100644 --- a/test/integration/fixtures/uncaught/listeners.fixture.js +++ b/test/integration/fixtures/uncaught/listeners.fixture.js @@ -3,8 +3,8 @@ const assert = require('assert'); const mocha = require("../../../../lib/mocha"); -// keep this under 10 to avoid warning -for (let i = 0; i < 9; i++) { +// keep this low to avoid warning +for (let i = 0; i < 5; i++) { const r = new mocha.Runner(new mocha.Suite("" + i, undefined)); r.run(); } diff --git a/test/integration/helpers.js b/test/integration/helpers.js index dfc35326e8..aee256fa96 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -350,12 +350,13 @@ function _spawnMochaWithListeners(args, fn, opts) { opts = Object.assign( { cwd: process.cwd(), - stdio: ['ignore', 'pipe', 'inherit'] + stdio: ['ignore', 'pipe', 'inherit'], + env: Object.assign({}, process.env) }, opts ); // prevent DEBUG from borking STDERR when piping. 
- // delete opts.env.DEBUG; + delete opts.env.DEBUG; debug('spawning: %s', [process.execPath].concat(args).join(' ')); var mocha = spawn(process.execPath, args, opts); diff --git a/test/integration/options/exit.spec.js b/test/integration/options/exit.spec.js index 376496f4fc..3aaa11c82b 100644 --- a/test/integration/options/exit.spec.js +++ b/test/integration/options/exit.spec.js @@ -1,7 +1,6 @@ 'use strict'; -var helpers = require('../helpers'); -var runMochaJSON = helpers.runMochaJSON; +var runMocha = require('../helpers').runMocha; describe('--exit', function() { var behaviors = { @@ -9,13 +8,30 @@ describe('--exit', function() { disabled: '--no-exit' }; + // subprocess + var mocha; + + function killSubprocess() { + mocha.kill('SIGINT'); + mocha.kill('SIGTERM'); + } + + // these two handlers deal with a ctrl-c on command-line + before(function() { + process.on('SIGINT', killSubprocess); + }); + + after(function() { + process.removeListener('SIGINT', killSubprocess); + }); + /** * Returns a test that executes Mocha in a subprocess with either * `--exit`, `--no-exit`, or default behavior. * * @param {boolean} shouldExit - Expected result; `true` if Mocha should * have force-killed the process. - * @param {string} [behavior] - 'enabled' or 'disabled' + * @param {"enabled"|"disabled"} [behavior] - 'enabled' or 'disabled'; omit for default * @returns {Function} async function implementing the test */ var runExit = function(shouldExit, behavior) { @@ -28,8 +44,7 @@ describe('--exit', function() { var timeoutObj; var fixture = 'exit.fixture.js'; var args = behaviors[behavior] ? [behaviors[behavior]] : []; - - var mocha = runMochaJSON(fixture, args, function postmortem(err) { + mocha = runMocha(fixture, args, function postmortem(err) { clearTimeout(timeoutObj); if (err) { return done(err); @@ -41,15 +56,13 @@ describe('--exit', function() { // If this callback happens, then Mocha didn't automatically exit. timeoutObj = setTimeout(function() { didExit = false; - // This is the only way to kill the child, afaik. - // After the process ends, the callback to `run()` above is handled. 
- mocha.kill('SIGINT'); + killSubprocess(); }, timeout - 500); }; }; describe('default behavior', function() { - it('should force exit after root suite completion', runExit(false)); + it('should not force exit after root suite completion', runExit(false)); }); describe('when enabled', function() { diff --git a/test/integration/options/jobs.spec.js b/test/integration/options/jobs.spec.js index 935c60c512..832d825298 100644 --- a/test/integration/options/jobs.spec.js +++ b/test/integration/options/jobs.spec.js @@ -1,19 +1,34 @@ 'use strict'; -var path = require('path'); var helpers = require('../helpers'); var runMochaAsync = helpers.runMochaAsync; describe('--jobs', function() { - it('should not work without --parallel', function() { - return expect( - runMochaAsync( - path.join('options', 'parallel', '*.fixture.js'), - ['--jobs', '3'], - 'pipe' - ), - 'to be fulfilled with value satisfying', - {output: /Missing\s+dependent\s+arguments[^]\s+jobs\s+->\s+parallel/i} - ); + describe('when set to a number less than 2', function() { + it('should run tests in serial', function() { + return expect( + runMochaAsync( + 'options/jobs/fail-in-parallel', + ['--parallel', '--jobs', '1'], + 'pipe' + ), + 'when fulfilled', + 'to have passed' + ); + }); + }); + + describe('when set to a number greater than 1', function() { + it('should run tests in parallel', function() { + return expect( + runMochaAsync( + 'options/jobs/fail-in-parallel', + ['--parallel', '--jobs', '2'], + 'pipe' + ), + 'when fulfilled', + 'to have failed' + ); + }); }); }); diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 869f643483..519e15d8fe 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -4,6 +4,7 @@ var path = require('path'); var helpers = require('../helpers'); var runMochaAsync = helpers.runMochaAsync; var invokeMochaAsync = helpers.invokeMochaAsync; +var utils = require('../../../lib/utils'); function compareReporters(reporter) { this.timeout(5000); @@ -38,6 +39,37 @@ function runGenericReporterTest(reporter) { } describe('--parallel', function() { + describe('when a test has a syntax error', function() { + describe('when there is only a single test file', function() { + it('should fail gracefully', function() { + return expect( + runMochaAsync('options/parallel/syntax-err', ['--parallel']), + 'when fulfilled', + 'to have failed with output', + /SyntaxError/ + ); + }); + }); + + describe('when there are multiple test files', function() { + it('should fail gracefully', function() { + return expect( + invokeMochaAsync( + [ + require.resolve( + '../fixtures/options/parallel/syntax-err.fixture.js' + ), + '--parallel' + ], + 'pipe' + )[1], + 'when fulfilled', + 'to have failed' + ); + }); + }); + }); + describe('when used with CJS tests', function() { it('should have the same result as with --no-parallel', function() { this.timeout(5000); @@ -62,6 +94,10 @@ describe('--parallel', function() { }); describe('when used with ESM tests', function() { + before(function() { + if (!utils.supportsEsModules()) this.skip(); + }); + it('should have the same result as with --no-parallel', function() { this.timeout(5000); return runMochaAsync(path.join('esm', '*.fixture.mjs'), [ diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index 8d7d1fe35b..9680a1c789 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -72,7 +72,7 @@ 
describe('buffered-runner', function() { }); describe('when a worker fails', function() { - it('should cleanly force-terminate the thread pool', function(done) { + it('should recover', function(done) { const options = {}; run.withArgs('some-file.js', options).rejects(new Error('whoops')); run.withArgs('some-other-file.js', options).resolves({ @@ -95,12 +95,7 @@ describe('buffered-runner', function() { runner.run( () => { - expect(terminate, 'to have calls satisfying', [ - { - args: [true] - }, - {args: []} - ]).and('was called twice'); + expect(terminate, 'to have calls satisfying', [{args: []}]); done(); }, { @@ -398,7 +393,7 @@ describe('buffered-runner', function() { describe('when an event contains an error and has positive failures', function() { describe('when subsequent files have not yet been run', function() { it('should cleanly terminate the thread pool', function(done) { - const options = {bail: true}; + const options = {}; const err = { __type: 'Error', message: 'oh no' @@ -416,7 +411,8 @@ describe('buffered-runner', function() { { eventName: EVENT_SUITE_END, data: { - title: 'some suite' + title: 'some suite', + _bail: true } } ] From da006c7883d18a07c091f8ca2327ef65ebdaefe9 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 10 Apr 2020 16:27:47 -0700 Subject: [PATCH 60/92] utils: add a test for async functions in type() an async function (`async () => {}`) will return `asyncfunction` instead of `function`. who knew!? limited to node tests for now. --- test/node-unit/utils.spec.js | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 test/node-unit/utils.spec.js diff --git a/test/node-unit/utils.spec.js b/test/node-unit/utils.spec.js new file mode 100644 index 0000000000..489052cba4 --- /dev/null +++ b/test/node-unit/utils.spec.js @@ -0,0 +1,17 @@ +'use strict'; + +const utils = require('../../lib/utils'); + +describe('utils', function() { + describe('function', function() { + describe('type', function() { + it('should return "asyncfunction" if the parameter is an async function', function() { + expect( + utils.type(async () => {}), + 'to be', + 'asyncfunction' + ); + }); + }); + }); +}); From 43ae67fe2825a19ffbe7340a8892d991c2edce2d Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 10 Apr 2020 16:31:22 -0700 Subject: [PATCH 61/92] all for async definition of mocha root hooks via --require a module can now do: `exports.mochaHooks = async () => ({/* mocha hooks obj */})` in addition to `exports.mochaHooks = { /* mocha hooks obj */ }` (the function style can be sync or async) to do this, we use a `middleware()` function in yargs, which (by default) runs _after_ the checks, but before the handler, _and_ can be async, which it is. 
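A minimal sketch of the new function-style export (the `./hooks.js` filename is hypothetical; the shape mirrors the root-hook-defs-c fixture added below):

    'use strict';

    // hooks.js - a user module loaded via `mocha --require ./hooks.js`
    exports.mochaHooks = async () => {
      // e.g. await a DB connection or config read before the hooks exist
      return {
        beforeAll() {
          console.log('beforeAll');
        },
        afterEach() {
          console.log('afterEach');
        }
      };
    };

Both the plain-object form and the (sync or async) function form are accepted; function-style exports are awaited by `loadRootHooks()` in the yargs middleware and flattened into a single hooks object before `Mocha` is instantiated.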
--- lib/cli/run-helpers.js | 72 ++++++----- lib/cli/run.js | 9 +- lib/mocha.js | 14 ++- lib/worker.js | 17 +-- .../require/root-hook-defs-c.fixture.js | 16 +++ .../require/root-hook-defs-d.fixture.js | 36 ++++++ .../require/root-hook-test-2.fixture.js | 6 + test/integration/options/require.spec.js | 52 +++++++- test/node-unit/cli/run-helpers.spec.js | 113 ++++++++++++++---- test/node-unit/worker.spec.js | 3 +- 10 files changed, 270 insertions(+), 68 deletions(-) create mode 100644 test/integration/fixtures/options/require/root-hook-defs-c.fixture.js create mode 100644 test/integration/fixtures/options/require/root-hook-defs-d.fixture.js create mode 100644 test/integration/fixtures/options/require/root-hook-test-2.fixture.js diff --git a/lib/cli/run-helpers.js b/lib/cli/run-helpers.js index 6f762b7752..60a774f5c4 100644 --- a/lib/cli/run-helpers.js +++ b/lib/cli/run-helpers.js @@ -13,6 +13,7 @@ const debug = require('debug')('mocha:cli:run:helpers'); const {watchRun, watchParallelRun} = require('./watch-run'); const collectFiles = require('./collect-files'); const {type} = require('../utils'); +const {createUnsupportedError} = require('../errors'); const cwd = process.cwd(); /** @@ -74,42 +75,59 @@ exports.list = str => /** * `require()` the modules as required by `--require `. - * Sorts any require-defined root hooks into buckets to assign to the root suite - * upon `Mocha` instantiation. + * + * Returns array of `mochaHooks` exports, if any. * @param {string[]} requires - Modules to require - * @returns {import('../mocha').MochaRootHooks} Any root hooks + * @returns {Array} Any root hooks * @private */ exports.handleRequires = (requires = []) => - requires.reduce( - (acc, mod) => { - let modpath = mod; - if (fs.existsSync(mod, {cwd}) || fs.existsSync(`${mod}.js`, {cwd})) { - modpath = path.resolve(mod); - debug('resolved required file %s to %s', mod, modpath); - } - const requiredModule = require(modpath); - if ( - type(requiredModule) === 'object' && - type(requiredModule.mochaHooks) === 'object' - ) { + requires.reduce((acc, mod) => { + let modpath = mod; + if (fs.existsSync(mod, {cwd}) || fs.existsSync(`${mod}.js`, {cwd})) { + modpath = path.resolve(mod); + debug('resolved required file %s to %s', mod, modpath); + } + const requiredModule = require(modpath); + if (type(requiredModule) === 'object' && requiredModule.mochaHooks) { + const mochaHooksType = type(requiredModule.mochaHooks); + if (/function$/.test(mochaHooksType) || mochaHooksType === 'object') { debug('found root hooks in required file %s', mod); - const rootHooks = requiredModule.mochaHooks; - acc.beforeAll = acc.beforeAll.concat(rootHooks.beforeAll || []); - acc.beforeEach = acc.beforeEach.concat(rootHooks.beforeEach || []); - acc.afterAll = acc.afterAll.concat(rootHooks.afterAll || []); - acc.afterEach = acc.afterEach.concat(rootHooks.afterEach || []); + acc.push(requiredModule.mochaHooks); + } else { + throw createUnsupportedError( + 'mochaHooks must be an object or a function returning (or fulfilling with) an object' + ); } - debug('loaded required file %s', mod); + } + debug('loaded required file %s', mod); + return acc; + }, []); + +/** + * Loads root hooks as exported via `mochaHooks` from required files. + * These can be sync/async functions returning objects, or just objects. + * Flattens to a single object. 
+ * @param {Array} rootHooks - Array of root hooks + * @private + * @returns {import('./mocha').MochaRootHookObject} + */ +exports.loadRootHooks = async rootHooks => { + const rootHookObjects = await Promise.all( + rootHooks.map(async hook => (/function$/.test(type(hook)) ? hook() : hook)) + ); + + return rootHookObjects.reduce( + (acc, hook) => { + acc.beforeAll = acc.beforeAll.concat(hook.beforeAll || []); + acc.beforeEach = acc.beforeEach.concat(hook.beforeEach || []); + acc.afterAll = acc.afterAll.concat(hook.afterAll || []); + acc.afterEach = acc.afterEach.concat(hook.afterEach || []); return acc; }, - { - beforeAll: [], - beforeEach: [], - afterAll: [], - afterEach: [] - } + {beforeAll: [], beforeEach: [], afterAll: [], afterEach: []} ); +}; /** * Collect and load test files, then run mocha instance. diff --git a/lib/cli/run.js b/lib/cli/run.js index e6baaae679..2d5448f33e 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -18,6 +18,7 @@ const { list, handleRequires, validatePlugin, + loadRootHooks, runMocha } = require('./run-helpers'); const {ONE_AND_DONES, ONE_AND_DONE_ARGS} = require('./one-and-dones'); @@ -333,12 +334,18 @@ exports.builder = yargs => } // load requires first, because it can impact "plugin" validation - argv.rootHooks = handleRequires(argv.require); + argv.rawRootHooks = handleRequires(argv.require); validatePlugin(argv, 'reporter', Mocha.reporters); validatePlugin(argv, 'ui', Mocha.interfaces); return true; }) + .middleware(async argv => { + if (argv.rawRootHooks.length) { + argv.rootHooks = await loadRootHooks(argv.rawRootHooks); + delete argv.rawRootHooks; + } + }) .array(types.array) .boolean(types.boolean) .string(types.string) diff --git a/lib/mocha.js b/lib/mocha.js index ddb6856c8e..fdd0ed0d65 100644 --- a/lib/mocha.js +++ b/lib/mocha.js @@ -92,7 +92,7 @@ exports.Test = require('./test'); * @param {string} [options.ui] - Interface name. * @param {boolean} [options.parallel] - Run jobs in parallel * @param {number} [options.jobs] - Max number of worker processes for parallel runs - * @param {MochaRootHooks} [options.rootHooks] - Hooks to bootstrap the root + * @param {MochaRootHookObject} [options.rootHooks] - Hooks to bootstrap the root * suite with */ function Mocha(options) { @@ -145,7 +145,7 @@ function Mocha(options) { this.rootHooks(options.rootHooks); } - if (options.parallel) { + if (options.parallel && options.jobs > 1) { this._runner = require('./buffered-runner'); this.lazyLoadFiles = true; } else { @@ -885,7 +885,7 @@ Mocha.prototype.run = function(fn) { /** * Assigns hooks to the root suite - * @param {MochaRootHooks} [hooks] - Hooks to assign to root suite + * @param {MochaRootHookObject} [hooks] - Hooks to assign to root suite * @chainable */ Mocha.prototype.rootHooks = function rootHooks(hooks) { @@ -913,9 +913,15 @@ Mocha.prototype.rootHooks = function rootHooks(hooks) { /** * An alternative way to define root hooks that works with parallel runs. - * @typedef {Object} MochaRootHooks + * @typedef {Object} MochaRootHookObject * @property {Function|Function[]} [beforeAll] - "Before all" hook(s) * @property {Function|Function[]} [beforeEach] - "Before each" hook(s) * @property {Function|Function[]} [afterAll] - "After all" hook(s) * @property {Function|Function[]} [afterEach] - "After each" hook(s) */ + +/** + * An function that returns a {@link MochaRootHookObject}, either sync or async. 
+ * @callback MochaRootHookFunction + * @returns {MochaRootHookObject|Promise} + */ diff --git a/lib/worker.js b/lib/worker.js index f6790a3c80..7a2fafb3ba 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -3,7 +3,11 @@ const {createInvalidArgumentTypeError} = require('./errors'); const workerpool = require('workerpool'); const Mocha = require('./mocha'); -const {handleRequires, validatePlugin} = require('./cli/run-helpers'); +const { + handleRequires, + validatePlugin, + loadRootHooks +} = require('./cli/run-helpers'); const debug = require('debug')(`mocha:parallel:worker:${process.pid}`); const {serialize} = require('./serializer'); const {setInterval, clearInterval} = global; @@ -29,13 +33,10 @@ if (workerpool.isMainThread) { * * @param {Options} argv - Command-line options */ -let bootstrap = argv => { - rootHooks = handleRequires(argv.require); +let bootstrap = async argv => { + const rawRootHooks = handleRequires(argv.require); + rootHooks = await loadRootHooks(rawRootHooks); validatePlugin(argv, 'ui', Mocha.interfaces); - process.on('beforeExit', () => { - /* istanbul ignore next */ - debug('exiting'); - }); bootstrap = () => {}; }; @@ -64,7 +65,7 @@ async function run(filepath, argv = {ui: 'bdd'}) { parallel: false }); - bootstrap(opts); + await bootstrap(opts); opts.rootHooks = rootHooks; diff --git a/test/integration/fixtures/options/require/root-hook-defs-c.fixture.js b/test/integration/fixtures/options/require/root-hook-defs-c.fixture.js new file mode 100644 index 0000000000..624973de75 --- /dev/null +++ b/test/integration/fixtures/options/require/root-hook-defs-c.fixture.js @@ -0,0 +1,16 @@ +'use strict'; + +exports.mochaHooks = async () => ({ + beforeAll() { + console.log('beforeAll'); + }, + beforeEach() { + console.log('beforeEach'); + }, + afterAll() { + console.log('afterAll'); + }, + afterEach() { + console.log('afterEach'); + } +}); diff --git a/test/integration/fixtures/options/require/root-hook-defs-d.fixture.js b/test/integration/fixtures/options/require/root-hook-defs-d.fixture.js new file mode 100644 index 0000000000..d073a35fcf --- /dev/null +++ b/test/integration/fixtures/options/require/root-hook-defs-d.fixture.js @@ -0,0 +1,36 @@ +'use strict'; + +exports.mochaHooks = async() => ({ + beforeAll: [ + function() { + console.log('beforeAll array 1'); + }, + function() { + console.log('beforeAll array 2'); + } + ], + beforeEach: [ + function() { + console.log('beforeEach array 1'); + }, + function() { + console.log('beforeEach array 2'); + } + ], + afterAll: [ + function() { + console.log('afterAll array 1'); + }, + function() { + console.log('afterAll array 2'); + } + ], + afterEach: [ + function() { + console.log('afterEach array 1'); + }, + function() { + console.log('afterEach array 2'); + } + ] +}); diff --git a/test/integration/fixtures/options/require/root-hook-test-2.fixture.js b/test/integration/fixtures/options/require/root-hook-test-2.fixture.js new file mode 100644 index 0000000000..4d00018d2c --- /dev/null +++ b/test/integration/fixtures/options/require/root-hook-test-2.fixture.js @@ -0,0 +1,6 @@ +// run with --require root-hook-defs-a.fixture.js --require +// root-hook-defs-b.fixture.js + +it('should also have some root hooks', function() { + // test +}); \ No newline at end of file diff --git a/test/integration/options/require.spec.js b/test/integration/options/require.spec.js index 726732cd5b..ba4e8a7e7a 100644 --- a/test/integration/options/require.spec.js +++ b/test/integration/options/require.spec.js @@ -4,7 +4,7 @@ var invokeMochaAsync = 
require('../helpers').invokeMochaAsync; describe('--require', function() { describe('when run in serial', function() { - it('should allow registration of root hooks', function() { + it('should allow registration of root hooks via mochaHooks object export', function() { return expect( invokeMochaAsync([ '--require=' + @@ -28,6 +28,31 @@ describe('--require', function() { /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ ); }); + + it('should allow registration of root hooks via mochaHooks function export', function() { + return expect( + invokeMochaAsync([ + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-c.fixture.js' + ), + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-d.fixture.js' + ), + require.resolve( + '../fixtures/options/require/root-hook-test.fixture.js' + ) + ])[1], + 'when fulfilled', + 'to contain output', + /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\n/ + ).and( + 'when fulfilled', + 'to contain output', + /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ + ); + }); }); describe('when run with --parallel', function() { @@ -56,5 +81,30 @@ describe('--require', function() { /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ ); }); + + it('should run root hooks for each job', function() { + return expect( + invokeMochaAsync([ + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-a.fixture.js' + ), + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-b.fixture.js' + ), + '--parallel', + require.resolve( + '../fixtures/options/require/root-hook-test.fixture.js' + ), + require.resolve( + '../fixtures/options/require/root-hook-test-2.fixture.js' + ) + ])[1], + 'when fulfilled', + 'to contain output', + /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\nafterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n\n {2}✓ should have some root hooks\nbeforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\nafterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n\n {2}✓ should also have some root hooks/ + ); + }); }); }); diff --git a/test/node-unit/cli/run-helpers.spec.js b/test/node-unit/cli/run-helpers.spec.js index a2a63335f5..1d8c25ef20 100644 --- a/test/node-unit/cli/run-helpers.spec.js +++ b/test/node-unit/cli/run-helpers.spec.js @@ -1,9 +1,13 @@ 'use strict'; -const {validatePlugin, list} = require('../../../lib/cli/run-helpers'); +const { + validatePlugin, + list, + loadRootHooks +} = require('../../../lib/cli/run-helpers'); const {createSandbox} = require('sinon'); -describe('cli "run" command', function() { +describe('run helper functions', function() { let sandbox; beforeEach(function() { @@ -14,36 +18,93 @@ describe('cli "run" command', function() { sandbox.restore(); }); - describe('helpers', function() { - describe('validatePlugin()', function() { - it('should disallow an array of module names', function() { - expect( - () => validatePlugin({foo: ['bar']}, 'foo'), - 'to throw a', - TypeError - ); + describe('loadRootHooks()', function() { + describe('when passed nothing', function() { + it('should reject', async function() { + return expect(loadRootHooks(), 'to be rejected'); }); }); - 
describe('list()', function() { - describe('when provided a flat array', function() { - it('should return a flat array', function() { - expect(list(['foo', 'bar']), 'to equal', ['foo', 'bar']); + describe('when passed empty array of hooks', function() { + it('should return an empty MochaRootHooks object', async function() { + return expect(loadRootHooks([]), 'to be fulfilled with', { + beforeAll: [], + beforeEach: [], + afterAll: [], + afterEach: [] }); }); - describe('when provided a nested array', function() { - it('should return a flat array', function() { - expect(list([['foo', 'bar'], 'baz']), 'to equal', [ - 'foo', - 'bar', - 'baz' - ]); - }); + }); + + describe('when passed an array containing hook objects and sync functions and async functions', function() { + it('should flatten them into a single object', async function() { + function a() {} + function b() {} + function d() {} + function g() {} + function f() {} + function c() { + return { + beforeAll: d, + beforeEach: g + }; + } + async function e() { + return { + afterEach: f + }; + } + return expect( + loadRootHooks([ + { + beforeEach: a + }, + { + afterAll: b + }, + c, + e + ]), + 'to be fulfilled with', + { + beforeAll: [d], + beforeEach: [a, g], + afterAll: [b], + afterEach: [f] + } + ); }); - describe('when given a comma-delimited string', function() { - it('should return a flat array', function() { - expect(list('foo,bar'), 'to equal', ['foo', 'bar']); - }); + }); + }); + + describe('validatePlugin()', function() { + it('should disallow an array of module names', function() { + expect( + () => validatePlugin({foo: ['bar']}, 'foo'), + 'to throw a', + TypeError + ); + }); + }); + + describe('list()', function() { + describe('when provided a flat array', function() { + it('should return a flat array', function() { + expect(list(['foo', 'bar']), 'to equal', ['foo', 'bar']); + }); + }); + describe('when provided a nested array', function() { + it('should return a flat array', function() { + expect(list([['foo', 'bar'], 'baz']), 'to equal', [ + 'foo', + 'bar', + 'baz' + ]); + }); + }); + describe('when given a comma-delimited string', function() { + it('should return a flat array', function() { + expect(list('foo,bar'), 'to equal', ['foo', 'bar']); }); }); }); diff --git a/test/node-unit/worker.spec.js b/test/node-unit/worker.spec.js index 58b4fdbd7d..94c3ceef18 100644 --- a/test/node-unit/worker.spec.js +++ b/test/node-unit/worker.spec.js @@ -49,7 +49,8 @@ describe('worker', function() { runHelpers = { handleRequires: sandbox.stub(), - validatePlugin: sandbox.stub() + validatePlugin: sandbox.stub(), + loadRootHooks: sandbox.stub().resolves() }; worker = rewiremock.proxy(WORKER_PATH, { From e289e407832a41e8450248e166dc00cb87df6645 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 10 Apr 2020 16:31:36 -0700 Subject: [PATCH 62/92] improvified some debug statements --- lib/runner.js | 40 +++++++++++++++++++++++++++++----------- lib/worker.js | 12 ++++++------ 2 files changed, 35 insertions(+), 17 deletions(-) diff --git a/lib/runner.js b/lib/runner.js index 1636d53f3e..8e467dc0d8 100644 --- a/lib/runner.js +++ b/lib/runner.js @@ -173,7 +173,7 @@ inherits(Runner, EventEmitter); * @return {Runner} Runner instance. 
*/ Runner.prototype.grep = function(re, invert) { - debug('grep %s', re); + debug('grep(): setting to %s', re); this._grep = re; this._invert = invert; this.total = this.grepTotal(this.suite); @@ -238,7 +238,7 @@ Runner.prototype.globals = function(arr) { if (!arguments.length) { return this._globals; } - debug('globals %j', arr); + debug('globals(): setting to %O', arr); this._globals = this._globals.concat(arr); return this; }; @@ -718,9 +718,10 @@ Runner.prototype.runSuite = function(suite, fn) { var self = this; var total = this.grepTotal(suite); - debug('run suite %s', suite.fullTitle()); + debug('runSuite(): running %s', suite.fullTitle()); if (!total || (self.failures && suite._bail)) { + debug('runSuite(): bailing'); return fn(); } @@ -791,17 +792,19 @@ Runner.prototype.runSuite = function(suite, fn) { */ Runner.prototype.uncaught = function(err) { if (err instanceof Pending) { + debug('uncaught(): caught a Pending'); return; } // browser does not exit script when throwing in global.onerror() if (this.allowUncaught && !process.browser) { + debug('uncaught(): bubbling exception due to --allow-uncaught'); throw err; } if (err) { - debug('uncaught exception %O', err); + debug('uncaught(): got truthy exception %O', err); } else { - debug('uncaught undefined/falsy exception'); + debug('uncaught(): undefined/falsy exception'); err = createInvalidExceptionError( 'Caught falsy/undefined exception which would otherwise be uncaught. No stack trace found; try a debugger', err @@ -810,6 +813,7 @@ Runner.prototype.uncaught = function(err) { if (!isError(err)) { err = thrown2Error(err); + debug('uncaught(): converted "error" %o to Error', err); } err.uncaught = true; @@ -817,12 +821,15 @@ Runner.prototype.uncaught = function(err) { if (!runnable) { runnable = new Runnable('Uncaught error outside test suite'); + debug('uncaught(): no current Runnable; created a phony one'); runnable.parent = this.suite; if (this.started) { + debug('uncaught(): failing gracefully'); this.fail(runnable, err); } else { // Can't recover from this failure + debug('uncaught(): test run has not yet started; unrecoverable'); this.emit(constants.EVENT_RUN_BEGIN); this.fail(runnable, err); this.emit(constants.EVENT_RUN_END); @@ -834,9 +841,11 @@ Runner.prototype.uncaught = function(err) { runnable.clearTimeout(); if (runnable.isFailed()) { + debug('uncaught(): Runnable has already failed'); // Ignore error if already failed return; } else if (runnable.isPending()) { + debug('uncaught(): pending Runnable wound up failing!'); // report 'pending test' retrospectively as failed this.fail(runnable, err, true); return; @@ -845,10 +854,11 @@ Runner.prototype.uncaught = function(err) { // we cannot recover gracefully if a Runnable has already passed // then fails asynchronously if (runnable.isPassed()) { + debug('uncaught(): Runnable has already passed; bailing gracefully'); this.fail(runnable, err); this.abort(); } else { - debug(runnable); + debug('uncaught(): forcing Runnable to complete with Error'); return runnable.callback(err); } }; @@ -885,24 +895,31 @@ Runner.prototype.run = function(fn, opts) { } function start() { + debug('run(): starting'); // If there is an `only` filter if (rootSuite.hasOnly()) { rootSuite.filterOnly(); + debug('run(): filtered exclusive Runnables'); } self.started = true; if (self._delay) { self.emit(constants.EVENT_DELAY_END); + debug('run(): "delay" ended'); } + debug('run(): emitting %s', constants.EVENT_RUN_BEGIN); self.emit(constants.EVENT_RUN_BEGIN); + debug('run(): emitted %s', 
constants.EVENT_RUN_BEGIN); self.runSuite(rootSuite, function() { - debug('finished running'); + debug( + 'run(): root suite completed; emitting %s', + constants.EVENT_RUN_END + ); self.emit(constants.EVENT_RUN_END); + debug('run(): emitted %s', constants.EVENT_RUN_END); }); } - debug(constants.EVENT_RUN_BEGIN); - // references cleanup to avoid memory leaks this.on(constants.EVENT_SUITE_END, function(suite) { suite.cleanReferences(); @@ -910,9 +927,9 @@ Runner.prototype.run = function(fn, opts) { // callback this.on(constants.EVENT_RUN_END, function() { - debug(constants.EVENT_RUN_END); process.removeListener('uncaughtException', uncaught); process.on('uncaughtException', self.uncaughtEnd); + debug('run(): emitted %s', constants.EVENT_RUN_END); fn(self.failures); }); @@ -925,6 +942,7 @@ Runner.prototype.run = function(fn, opts) { // might be nice to debounce some dots while we wait. this.emit(constants.EVENT_DELAY_BEGIN, rootSuite); rootSuite.once(EVENT_ROOT_SUITE_RUN, start); + debug('run(): waiting for green light due to --delay'); } else { Runner.immediately(function() { start(); @@ -942,7 +960,7 @@ Runner.prototype.run = function(fn, opts) { * @return {Runner} Runner instance. */ Runner.prototype.abort = function() { - debug('aborting'); + debug('abort(): aborting'); this._abort = true; return this; diff --git a/lib/worker.js b/lib/worker.js index 7a2fafb3ba..0feb4b6f96 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -56,7 +56,7 @@ async function run(filepath, argv = {ui: 'bdd'}) { ); } - debug('running test file %s', filepath); + debug('run(): running test file %s', filepath); const opts = Object.assign(argv, { // workers only use the `Buffered` reporter. @@ -74,30 +74,30 @@ async function run(filepath, argv = {ui: 'bdd'}) { try { await mocha.loadFilesAsync(); } catch (err) { - debug('could not load file %s: %s', filepath, err); + debug('run(): could not load file %s: %s', filepath, err); throw err; } return new Promise((resolve, reject) => { const t = setInterval(() => { - debug('still running %s...', filepath); + debug('run(): still running %s...', filepath); }, 5000).unref(); mocha.run(result => { // Runner adds these; if we don't remove them, we'll get a leak. process.removeAllListeners('uncaughtException'); debug( - 'completed run with %d test failures', + 'run(): completed run with %d test failures', typeof result.failures === 'number' ? result.failures : 0 ); try { const serialized = serialize(result); - debug('returning to main process'); + debug('run(): returning to main process'); resolve(serialized); } catch (err) { // TODO: figure out exactly what the sad path looks like here. // rejection should only happen if an error is "unrecoverable" - debug('rejecting: %O', err); + debug('run(): rejecting: %O', err); reject(err); } finally { clearInterval(t); From 28158ef42500f1bb218195e3afd5c66a3872f4bc Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 14 Apr 2020 11:44:32 -0700 Subject: [PATCH 63/92] remove some done TODOs --- lib/buffered-runner.js | 4 ---- 1 file changed, 4 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 55ca246d5b..c77cc6300f 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -40,11 +40,7 @@ class BufferedRunner extends Runner { * run. The information is processed as it's received, and emitted to a * {@link Reporter}, which is likely listening for these events. * - * @todo handle tests in a specific order, e.g., via `--file`? * @todo handle delayed runs? 
- * @todo graceful failure - * @todo audit `BufferedEvent` objects; e.g. do tests need a `parent` prop? - * @todo should we just instantiate a `Test` object from the `BufferedEvent`? * @param {Function} callback - Called with an exit code corresponding to * number of test failures. * @param {{files: string[], options: Options}} opts - Files to run and From 63d703ac7c126722eb2c88e3ad0cd3ffc9b41edd Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 14 Apr 2020 11:45:22 -0700 Subject: [PATCH 64/92] refactor: rename param name in Mocha#reporter method for clarity --- lib/mocha.js | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/lib/mocha.js b/lib/mocha.js index fdd0ed0d65..538af8b09a 100644 --- a/lib/mocha.js +++ b/lib/mocha.js @@ -202,20 +202,20 @@ Mocha.prototype.addFile = function(file) { * // Use XUnit reporter and direct its output to file * mocha.reporter('xunit', { output: '/path/to/testspec.xunit.xml' }); */ -Mocha.prototype.reporter = function(reporter, reporterOptions) { - if (typeof reporter === 'function') { - this._reporter = reporter; +Mocha.prototype.reporter = function(reporterName, reporterOptions) { + if (typeof reporterName === 'function') { + this._reporter = reporterName; } else { - reporter = reporter || 'spec'; - var _reporter; + reporterName = reporterName || 'spec'; + var reporter; // Try to load a built-in reporter. - if (builtinReporters[reporter]) { - _reporter = builtinReporters[reporter]; + if (builtinReporters[reporterName]) { + reporter = builtinReporters[reporterName]; } // Try to load reporters from process.cwd() and node_modules - if (!_reporter) { + if (!reporter) { try { - _reporter = require(reporter); + reporter = require(reporterName); } catch (err) { if ( err.code !== 'MODULE_NOT_FOUND' || @@ -223,31 +223,31 @@ Mocha.prototype.reporter = function(reporter, reporterOptions) { ) { // Try to load reporters from a path (absolute or relative) try { - _reporter = require(path.resolve(process.cwd(), reporter)); + reporter = require(path.resolve(process.cwd(), reporterName)); } catch (_err) { _err.code !== 'MODULE_NOT_FOUND' || _err.message.indexOf('Cannot find module') !== -1 - ? console.warn(sQuote(reporter) + ' reporter not found') + ? 
console.warn(sQuote(reporterName) + ' reporter not found') : console.warn( - sQuote(reporter) + + sQuote(reporterName) + ' reporter blew up with error:\n' + err.stack ); } } else { console.warn( - sQuote(reporter) + ' reporter blew up with error:\n' + err.stack + sQuote(reporterName) + ' reporter blew up with error:\n' + err.stack ); } } } - if (!_reporter) { + if (!reporter) { throw createInvalidReporterError( - 'invalid reporter ' + sQuote(reporter), - reporter + 'invalid reporter ' + sQuote(reporterName), + reporterName ); } - this._reporter = _reporter; + this._reporter = reporter; } this.options.reporterOption = reporterOptions; // alias option name is used in public reporters xunit/tap/progress From 680240fbc8a606076ea10771549a4f52b82be87a Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 14 Apr 2020 11:46:46 -0700 Subject: [PATCH 65/92] refactor validatePlugin() to throw nicer errors created `createInvalidPlugin` method in `lib/errors.js` --- lib/cli/run-helpers.js | 29 ++++++++----- lib/errors.js | 23 ++++++++++- test/node-unit/cli/run-helpers.spec.js | 56 +++++++++++++++++++++++--- 3 files changed, 91 insertions(+), 17 deletions(-) diff --git a/lib/cli/run-helpers.js b/lib/cli/run-helpers.js index 60a774f5c4..aa0efcf58e 100644 --- a/lib/cli/run-helpers.js +++ b/lib/cli/run-helpers.js @@ -13,7 +13,7 @@ const debug = require('debug')('mocha:cli:run:helpers'); const {watchRun, watchParallelRun} = require('./watch-run'); const collectFiles = require('./collect-files'); const {type} = require('../utils'); -const {createUnsupportedError} = require('../errors'); +const {createUnsupportedError, createInvalidPluginError} = require('../errors'); const cwd = process.cwd(); /** @@ -218,22 +218,31 @@ exports.runMocha = async (mocha, options) => { }; /** - * Used for `--reporter` and `--ui`. Ensures there's only one, and asserts - * that it actually exists. - * @todo XXX This must get run after requires are processed, as it'll prevent - * interfaces from loading. + * Used for `--reporter` and `--ui`. Ensures there's only one, and asserts that + * it actually exists. This must be run _after_ requires are processed (see + * {@link handleRequires}), as it'll prevent interfaces from loading otherwise. * @param {Object} opts - Options object * @param {string} key - Resolvable module name or path - * @param {Object} [map] - An object perhaps having key `key` + * @param {Object} [map] - An object perhaps having key `key`. Used as a cache + * of sorts; `Mocha.reporters` is one, where each key corresponds to a reporter + * name * @private */ exports.validatePlugin = (opts, key, map = {}) => { if (Array.isArray(opts[key])) { - throw new TypeError(`"--${key} <${key}>" can only be specified once`); + throw createInvalidPluginError( + `"--${key} <${key}>" can only be specified once`, + key + ); } - const unknownError = () => new Error(`Unknown "${key}": ${opts[key]}`); + const unknownError = err => + createInvalidPluginError( + `Could not load ${key} "${opts[key]}": ${err}`, + key + ); + // if this exists, then it's already loaded, so nothing more to do. 
if (!map[opts[key]]) { try { opts[key] = require(opts[key]); @@ -243,10 +252,10 @@ exports.validatePlugin = (opts, key, map = {}) => { try { opts[key] = require(path.resolve(process.cwd(), opts[key])); } catch (err) { - throw unknownError(); + throw unknownError(err); } } else { - throw unknownError(); + throw unknownError(err); } } } diff --git a/lib/errors.js b/lib/errors.js index fafee70eee..5a1a4ff6b1 100644 --- a/lib/errors.js +++ b/lib/errors.js @@ -129,6 +129,26 @@ function createInvalidExceptionError(message, value) { return err; } +/** + * Dynamically creates a plugin-type-specific error based on plugin type + * @param {string} message - Error message + * @param {"reporter"|"interface"} pluginType - Plugin type. Future: expand as needed + * @param {string} pluginName - Name of plugin + * @throws When `pluginType` is not known + * @public + * @returns {Error} + */ +function createInvalidPluginError(message, pluginType, pluginName) { + switch (pluginType) { + case 'reporter': + return createInvalidReporterError(message, pluginName); + case 'interface': + return createInvalidInterfaceError(message, pluginName); + default: + throw new Error('unknown pluginType "' + pluginType + '"'); + } +} + module.exports = { createInvalidArgumentTypeError: createInvalidArgumentTypeError, createInvalidArgumentValueError: createInvalidArgumentValueError, @@ -137,5 +157,6 @@ module.exports = { createInvalidReporterError: createInvalidReporterError, createMissingArgumentError: createMissingArgumentError, createNoFilesMatchPatternError: createNoFilesMatchPatternError, - createUnsupportedError: createUnsupportedError + createUnsupportedError: createUnsupportedError, + createInvalidPluginError: createInvalidPluginError }; diff --git a/test/node-unit/cli/run-helpers.spec.js b/test/node-unit/cli/run-helpers.spec.js index 1d8c25ef20..9dd249ccfb 100644 --- a/test/node-unit/cli/run-helpers.spec.js +++ b/test/node-unit/cli/run-helpers.spec.js @@ -78,12 +78,56 @@ describe('run helper functions', function() { }); describe('validatePlugin()', function() { - it('should disallow an array of module names', function() { - expect( - () => validatePlugin({foo: ['bar']}, 'foo'), - 'to throw a', - TypeError - ); + describe('when used with "reporter" key', function() { + it('should disallow an array of names', function() { + expect( + () => validatePlugin({reporter: ['bar']}, 'reporter'), + 'to throw', + { + code: 'ERR_MOCHA_INVALID_REPORTER', + message: /can only be specified once/i + } + ); + }); + + it('should fail to recognize an unknown reporter', function() { + expect( + () => validatePlugin({reporter: 'bar'}, 'reporter'), + 'to throw', + {code: 'ERR_MOCHA_INVALID_REPORTER', message: /cannot find module/i} + ); + }); + }); + + describe('when used with an "interfaces" key', function() { + it('should disallow an array of names', function() { + expect( + () => validatePlugin({interface: ['bar']}, 'interface'), + 'to throw', + { + code: 'ERR_MOCHA_INVALID_INTERFACE', + message: /can only be specified once/i + } + ); + }); + + it('should fail to recognize an unknown interface', function() { + expect( + () => validatePlugin({interface: 'bar'}, 'interface'), + 'to throw', + {code: 'ERR_MOCHA_INVALID_INTERFACE', message: /cannot find module/i} + ); + }); + }); + + describe('when used with an unknown plugin type', function() { + it('should fail', function() { + expect( + () => validatePlugin({frog: ['bar']}, 'frog'), + 'to throw', + /unknown plugin/i + ); + }); }); }); From 4b9646126b9d4ba3ec6d4edca9ca9ba95203b492 Mon Sep 17 
00:00:00 2001 From: Christopher Hiller Date: Tue, 14 Apr 2020 15:25:08 -0700 Subject: [PATCH 66/92] exit fixes - issue `SIGKILL` in `exit.spec.js` to avoid a potential exception when quitting - main mocha executable: use `SIGKILL` instead of unsupported `SIGTERM` on main subprocess in win32. --- bin/mocha | 15 +++++++++++++-- test/integration/options/exit.spec.js | 3 +-- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/bin/mocha b/bin/mocha index ff8e6c8c46..22252b1021 100755 --- a/bin/mocha +++ b/bin/mocha @@ -126,9 +126,20 @@ if (Object.keys(nodeArgs).length) { // terminate children. process.on('SIGINT', () => { - proc.kill('SIGINT'); // calls runner.abort() + // XXX: a previous comment said this would abort the runner, but I can't see that it does + // anything with the default runner. + proc.kill('SIGINT'); + // if running in parallel mode, we will have a proper SIGINT handler, so the below won't + // be needed. if (!args.parallel || args.jobs < 2) { - proc.kill('SIGTERM'); // if that didn't work, we're probably in an infinite loop, so make it die. + // win32 does not support SIGTERM, so use next best thing. + if (require('os').platform() === 'win32') { + proc.kill('SIGKILL'); + } else { + // using SIGKILL won't cleanly close the output streams, which can result + // in cut-off text or a befouled terminal. + proc.kill('SIGTERM'); + } } }); } else { diff --git a/test/integration/options/exit.spec.js b/test/integration/options/exit.spec.js index 3aaa11c82b..cde3c35b96 100644 --- a/test/integration/options/exit.spec.js +++ b/test/integration/options/exit.spec.js @@ -12,8 +12,7 @@ describe('--exit', function() { var mocha; function killSubprocess() { - mocha.kill('SIGINT'); - mocha.kill('SIGTERM'); + mocha.kill('SIGKILL'); } // these two handlers deal with a ctrl-c on command-line From 97561ad6cbcedb5ca4f4c54b1f74f58207930b9e Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 14 Apr 2020 15:41:04 -0700 Subject: [PATCH 67/92] fix jsdoc types --- lib/buffered-runner.js | 2 +- lib/cli/run-helpers.js | 12 ++++++------ lib/cli/watch-run.js | 20 ++++++++++---------- lib/serializer.js | 2 +- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index c77cc6300f..333ca00429 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -62,7 +62,7 @@ class BufferedRunner extends Runner { let debugInterval; /** - * @type {import('workerpool').WorkerPool} + * @type {WorkerPool} */ let pool; diff --git a/lib/cli/run-helpers.js b/lib/cli/run-helpers.js index aa0efcf58e..2d2eaa088d 100644 --- a/lib/cli/run-helpers.js +++ b/lib/cli/run-helpers.js @@ -78,7 +78,7 @@ exports.list = str => * * Returns array of `mochaHooks` exports, if any. * @param {string[]} requires - Modules to require - * @returns {Array} Any root hooks + * @returns {MochaRootHookObject|MochaRootHookFunction} Any root hooks * @private */ exports.handleRequires = (requires = []) => @@ -108,9 +108,9 @@ exports.handleRequires = (requires = []) => * Loads root hooks as exported via `mochaHooks` from required files. * These can be sync/async functions returning objects, or just objects. * Flattens to a single object. 
- * @param {Array} rootHooks - Array of root hooks + * @param {Array} rootHooks - Array of root hooks * @private - * @returns {import('./mocha').MochaRootHookObject} + * @returns {MochaRootHookObject} */ exports.loadRootHooks = async rootHooks => { const rootHookObjects = await Promise.all( @@ -158,7 +158,7 @@ const singleRun = async (mocha, {exit}, fileCollectParams) => { * @param {Options} options - Command line options * @param {Object} fileCollectParams - Parameters that control test * file collection. See `lib/cli/collect-files.js`. - * @returns {Promise} + * @returns {Promise} * @ignore * @private */ @@ -182,9 +182,9 @@ const parallelRun = async (mocha, options, fileCollectParams) => { * - `parallelRun`: run tests in parallel & exit * - `watchParallelRun`: run tests in parallel, rerunning as files change * @param {Mocha} mocha - Mocha instance - * @param {Mocha.MochaOptions} opts - Command line options + * @param {Options} opts - Command line options * @private - * @returns {Promise} + * @returns {Promise} */ exports.runMocha = async (mocha, options) => { const { diff --git a/lib/cli/watch-run.js b/lib/cli/watch-run.js index 1f8589d4b9..d77740dd93 100644 --- a/lib/cli/watch-run.js +++ b/lib/cli/watch-run.js @@ -23,7 +23,7 @@ const collectFiles = require('./collect-files'); * `chokidar.watch`. * @param {string[]} opts.watchIgnore - List of paths and patterns to * exclude from watching. See `ignored` option of `chokidar`. - * @param {import('./collect-files').FileCollectionOptions} fileCollectParams - Parameters that control test + * @param {FileCollectionOptions} fileCollectParams - Parameters that control test * @private */ exports.watchParallelRun = ( @@ -59,7 +59,7 @@ exports.watchParallelRun = ( * `chokidar.watch`. * @param {string[]} opts.watchIgnore - List of paths and patterns to * exclude from watching. See `ignored` option of `chokidar`. - * @param {import('./collect-files').FileCollectionOptions} fileCollectParams - Parameters that control test + * @param {FileCollectionOptions} fileCollectParams - Parameters that control test * file collection. See `lib/cli/collect-files.js`. * @private */ @@ -118,8 +118,8 @@ exports.watchRun = (mocha, {watchFiles, watchIgnore}, fileCollectParams) => { * `chokidar.watch`. * @param {string[]} [opts.watchIgnore] - List of paths and patterns to exclude * from watching. See `ignored` option of `chokidar`. - * @param {import('./collect-files').FileCollectionOptions} opts.fileCollectParams - List of extensions to watch if `opts.watchFiles` is not given. - * @returns {import('chokidar').FSWatcher} + * @param {FileCollectionOptions} opts.fileCollectParams - List of extensions to watch if `opts.watchFiles` is not given. + * @returns {FSWatcher} * @ignore * @private */ @@ -179,7 +179,7 @@ const createWatcher = ( * Create an object that allows you to rerun tests on the mocha instance. * * @param {Mocha} mocha - Mocha instance - * @param {import('chokidar').FSWatcher} watcher - chokidar `FSWatcher` instance + * @param {FSWatcher} watcher - chokidar `FSWatcher` instance * @param {Object} [opts] - Options! 
* @param {BeforeWatchRun} [opts.beforeRun] - Function to call before `mocha.run()` * @param {AfterWatchRun} [opts.afterRun] - Function to call after `mocha.run()` @@ -282,7 +282,7 @@ const eraseLine = () => { /** * Blast all of the watched files out of `require.cache` - * @param {import('chokidar').FSWatcher} watcher - chokidar FSWatcher + * @param {FSWatcher} watcher - chokidar FSWatcher * @ignore * @private */ @@ -299,7 +299,7 @@ const blastCache = watcher => { * Optionally, it can return a new `Mocha` instance. * @callback BeforeWatchRun * @private - * @param {{mocha: Mocha, watcher: import('chokidar').FSWatcher}} options + * @param {{mocha: Mocha, watcher: FSWatcher}} options * @returns {Mocha} */ @@ -308,7 +308,7 @@ const blastCache = watcher => { * require cache. * @callback AfterWatchRun * @private - * @param {{mocha: Mocha, watcher: import('chokidar').FSWatcher}} options + * @param {{mocha: Mocha, watcher: FSWatcher}} options * @returns {void} */ @@ -316,6 +316,6 @@ const blastCache = watcher => { * Object containing run control methods * @typedef {Object} Rerunner * @private - * @property {() => void} run - Calls `mocha.run()` - * @property {() => void} scheduleRun - Schedules another call to `run` + * @property {Function} run - Calls `mocha.run()` + * @property {Function} scheduleRun - Schedules another call to `run` */ diff --git a/lib/serializer.js b/lib/serializer.js index 5ff523faf6..db2166a415 100644 --- a/lib/serializer.js +++ b/lib/serializer.js @@ -169,7 +169,7 @@ class SerializableEvent { /** * Used internally by {@link SerilizableEvent#serialize}. * @ignore - * @param {[object, string][]} pairs - List of parent/key tuples to process; modified in-place + * @param {Array} pairs - List of parent/key tuples to process; modified in-place. 
This JSDoc type is an approximation * @param {object} parent - Some parent object * @param {string} key - Key to inspect * @param {WeakSet} seenObjects - For avoiding circular references From a0423a04c8bde26c9263f25b78256860154c3c9e Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 14 Apr 2020 15:58:22 -0700 Subject: [PATCH 68/92] avoid flake in --require test via burly regexp --- test/integration/options/require.spec.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/integration/options/require.spec.js b/test/integration/options/require.spec.js index ba4e8a7e7a..2026243f36 100644 --- a/test/integration/options/require.spec.js +++ b/test/integration/options/require.spec.js @@ -103,7 +103,11 @@ describe('--require', function() { ])[1], 'when fulfilled', 'to contain output', - /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\nafterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n\n {2}✓ should have some root hooks\nbeforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\nafterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n\n {2}✓ should also have some root hooks/ + /(beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2[^]+){2}/ + ).and( + 'when fulfilled', + 'to contain output', + /(afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2[^]+){2}/ ); }); }); From 118c137aceb27781a758bd4cd0dc8c169e507393 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Tue, 14 Apr 2020 15:59:44 -0700 Subject: [PATCH 69/92] improve another regexp in --require test Signed-off-by: Christopher Hiller --- test/integration/options/require.spec.js | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/test/integration/options/require.spec.js b/test/integration/options/require.spec.js index 2026243f36..a6c8ef7ea2 100644 --- a/test/integration/options/require.spec.js +++ b/test/integration/options/require.spec.js @@ -74,11 +74,7 @@ describe('--require', function() { ])[1], 'when fulfilled', 'to contain output', - /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\n/ - ).and( - 'when fulfilled', - 'to contain output', - /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ + /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2[^]+afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2/ ); }); From 6afce46518eddeff30f0dc7a6cd1217a03659817 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 15 Apr 2020 16:17:15 -0700 Subject: [PATCH 70/92] update some debug statements in bin/mocha --- bin/mocha | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/bin/mocha b/bin/mocha index 22252b1021..9ae2514d83 100755 --- a/bin/mocha +++ b/bin/mocha @@ -108,7 +108,9 @@ if (Object.keys(nodeArgs).length) { unparse(mochaArgs, {alias: aliases}) ); - debug(`exec ${process.execPath} w/ args:`, args); + debug( + `forking child process via command: ${process.execPath} ${args.join(' ')}` + ); const proc = spawn(process.execPath, args, { stdio: 'inherit' @@ -128,6 +130,7 @@ if (Object.keys(nodeArgs).length) { process.on('SIGINT', () => { // XXX: a previous comment said this would abort the runner, but I can't see that it 
does // anything with the default runner. + debug('main process caught SIGINT'); proc.kill('SIGINT'); // if running in parallel mode, we will have a proper SIGINT handler, so the below won't // be needed. @@ -138,10 +141,12 @@ if (Object.keys(nodeArgs).length) { } else { // using SIGKILL won't cleanly close the output streams, which can result // in cut-off text or a befouled terminal. + debug('sending SIGTERM to child process'); proc.kill('SIGTERM'); } } }); } else { + debug('running Mocha in-process'); require('../lib/cli/cli').main(unparse(mochaArgs, {alias: aliases})); } From 813016bf1a6232a8be055868be9a2bec0b5a2735 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 15 Apr 2020 16:17:33 -0700 Subject: [PATCH 71/92] fix landing reporter to not spam SIGINT handlers --- lib/reporters/landing.js | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/lib/reporters/landing.js b/lib/reporters/landing.js index c49c10781b..03c9acb1af 100644 --- a/lib/reporters/landing.js +++ b/lib/reporters/landing.js @@ -98,13 +98,15 @@ function Landing(runner, options) { process.stdout.write('\n'); self.epilogue(); }); -} -// if cursor is hidden when we ctrl-C, then it will remain hidden unless... -process.once('SIGINT', function() { - cursor.show(); - process.emit('SIGINT'); -}); + // if cursor is hidden when we ctrl-C, then it will remain hidden unless... + process.once('SIGINT', function() { + cursor.show(); + process.nextTick(function() { + process.kill(process.pid, 'SIGINT'); + }); + }); +} /** * Inherit from `Base.prototype`. From bcc0e4e1a979670f9a11ea21e41012eadba52acb Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 15 Apr 2020 16:20:16 -0700 Subject: [PATCH 72/92] fix integration test helper to not completely ignore SIGINT also allows override of `DEBUG` in environment if we really need it. --- test/integration/helpers.js | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/test/integration/helpers.js b/test/integration/helpers.js index aee256fa96..918b0b4198 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -345,18 +345,20 @@ function _spawnMochaWithListeners(args, fn, opts) { var output = ''; opts = opts || {}; if (opts === 'pipe') { - opts = {stdio: 'pipe'}; + opts = {stdio: ['inherit', 'pipe', 'pipe']}; } + var env = Object.assign({}, process.env); + // prevent DEBUG from borking STDERR when piping, unless explicitly set via `opts` + delete env.DEBUG; + opts = Object.assign( { cwd: process.cwd(), - stdio: ['ignore', 'pipe', 'inherit'], - env: Object.assign({}, process.env) + stdio: ['inherit', 'pipe', 'inherit'], + env: env }, opts ); - // prevent DEBUG from borking STDERR when piping. 
- delete opts.env.DEBUG; debug('spawning: %s', [process.execPath].concat(args).join(' ')); var mocha = spawn(process.execPath, args, opts); From 88887b6e810a98e877675d11870967bab77e6614 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 15 Apr 2020 17:05:43 -0700 Subject: [PATCH 73/92] BufferedRunner fixes - do not force-terminate worker pool when bailing (unnecessary) - use a finite-state-machine-ish thing to manage state - "correctly" resend `SIGINT` in cleanup listener - differentiate between "bailing" and "aborting", which have diff't behaviors: - "bailing" is the `bail` flag - "aborting" is either an uncaught exception (with `--allow-uncaught`) _or_ a `SIGINT` signal --- lib/buffered-runner.js | 106 +++++++++++++++++-------- test/node-unit/buffered-runner.spec.js | 10 +-- 2 files changed, 80 insertions(+), 36 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 333ca00429..cea850dbbb 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -9,6 +9,7 @@ const workerpool = require('workerpool'); const {deserialize} = require('./serializer'); const WORKER_PATH = require.resolve('./worker.js'); const {setInterval, clearInterval} = global; +const {createMap} = require('./utils'); const debugStats = pool => { const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats(); debug( @@ -25,11 +26,51 @@ const debugStats = pool => { */ const DEBUG_STATS_INTERVAL = 5000; +const ABORTED = 'ABORTED'; +const IDLE = 'IDLE'; +const ABORTING = 'ABORTING'; +const RUNNING = 'RUNNING'; +const BAILING = 'BAILING'; +const BAILED = 'BAILED'; +const COMPLETE = 'COMPLETE'; + +const states = createMap({ + [IDLE]: new Set([RUNNING, ABORTING]), + [RUNNING]: new Set([COMPLETE, BAILING, ABORTING]), + [COMPLETE]: new Set(), + [ABORTED]: new Set(), + [ABORTING]: new Set([ABORTED]), + [BAILING]: new Set([BAILED, ABORTING]), + [BAILED]: new Set([COMPLETE, ABORTING]) +}); + /** * This `Runner` delegates tests runs to worker threads. Does not execute any * {@link Runnable}s by itself! */ class BufferedRunner extends Runner { + constructor(...args) { + super(...args); + + let state = IDLE; + Object.defineProperty(this, '_state', { + get() { + return state; + }, + set(newState) { + if (states[state].has(newState)) { + state = newState; + } else { + throw new Error(`invalid state transition: ${state} => ${newState}`); + } + } + }); + + this.once('EVENT_RUN_END', () => { + this._state = COMPLETE; + }); + } + /** * Runs Mocha tests by creating a thread pool, then delegating work to the * worker threads. @@ -75,28 +116,28 @@ class BufferedRunner extends Runner { }); sigIntListener = async () => { - if (!didAbort) { - didAbort = true; + if (this._state !== ABORTING) { + debug('run(): caught a SIGINT'); + this._state = ABORTING; + try { - debug('shutting down %d (max) workers', jobs); + debug('run(): shutting down %d (max) workers', jobs); await pool.terminate(true); } catch (err) { - console.error(err); + console.error( + `Error while attempting to force-terminate worker pool: ${err}` + ); } finally { - process.exit(128); + process.nextTick(() => { + debug('run(): imminent death'); + this._state = 'ABORTED'; + process.kill(process.pid, 'SIGINT'); + }); } } }; - process.on('SIGINT', sigIntListener); - - /** - * This flag is used by all workers in the pool; it tells them that we - * aborted _on purpose_, because of "bail". If it's _not_ true, an - * abnormal termination of the worker process is considered a fatal - * error. 
- */ - let didAbort = false; + process.once('SIGINT', sigIntListener); // the "pool proxy" object is essentially just syntactic sugar to call a // worker's procedure as one would a regular function. @@ -109,6 +150,8 @@ class BufferedRunner extends Runner { // this is set for uncaught exception handling in `Runner#uncaught` this.started = true; + this._state = RUNNING; + this.emit(EVENT_RUN_BEGIN); const results = await allSettled( @@ -116,6 +159,10 @@ class BufferedRunner extends Runner { debug('run(): enqueueing test file %s', file); try { const result = await poolProxy.run(file, options); + if (this._state === BAILED) { + // short-circuit after a graceful bail + return; + } const {failureCount, events} = deserialize(result); debug( 'run(): completed run of file %s; %d failures / %d events', @@ -128,44 +175,40 @@ class BufferedRunner extends Runner { * If we set this, then we encountered a "bail" flag, and will * terminate the pool once all events have been emitted. */ - let shouldAbort = false; let event = events.shift(); while (event) { this.emit(event.eventName, event.data, event.error); if ( - (failureCount || event.error) && + this._state !== BAILING && event.data && - event.data._bail + event.data._bail && + (failureCount || event.error) ) { debug('run(): nonzero failure count & found bail flag'); // we need to let the events complete for this file, as the worker // should run any cleanup hooks - shouldAbort = true; + this._state = BAILING; } event = events.shift(); } - if (shouldAbort) { - didAbort = true; + if (this._state === BAILING) { debug('run(): terminating pool due to "bail" flag'); - await pool.terminate(true); + this._state = BAILED; + await pool.terminate(); } } catch (err) { - if (didAbort) { + if (this._state === BAILED || this._state === ABORTING) { debug( 'run(): worker pool terminated with intent; skipping file %s', file ); } else { // this is an uncaught exception + debug('run(): encountered uncaught exception: %O', err); if (this.allowUncaught) { - debug( - 'run(): found uncaught exception with --allow-uncaught' - ); // still have to clean up - didAbort = true; + this._state = ABORTING; await pool.terminate(true); - } else { - debug('run(): found uncaught exception: %O', err); } throw err; } @@ -182,6 +225,7 @@ class BufferedRunner extends Runner { .filter(({status}) => status === 'rejected') .forEach(({reason}) => { if (this.allowUncaught) { + // yep, just the first one. throw reason; } // "rejected" will correspond to uncaught exceptions. @@ -189,15 +233,15 @@ class BufferedRunner extends Runner { this.uncaught(reason); }); + if (this._state === ABORTING) { + return; + } this.emit(EVENT_RUN_END); debug('run(): completing with failure count %d', this.failures); callback(this.failures); } catch (err) { - // this is probably from an uncaught exception and this.allowUncaught. 
- // Promise would trap this otherwise process.nextTick(() => { debug('run(): throwing uncaught exception'); - process.exitCode = process.exitCode || this.failures || 1; throw err; }); } finally { diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index 9680a1c789..a1c0a2fc9e 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -267,7 +267,7 @@ describe('buffered-runner', function() { runner.run( () => { expect(terminate, 'to have calls satisfying', [ - {args: [true]}, // this is the pool force-terminating + {args: []}, // this is the pool force-terminating {args: []} // this will always be called, and will do nothing due to the previous call ]).and('was called twice'); done(); @@ -339,7 +339,7 @@ describe('buffered-runner', function() { runner.run( () => { expect(terminate, 'to have calls satisfying', [ - {args: [true]}, // this is the pool force-terminating + {args: []}, // this is the pool force-terminating {args: []} // this will always be called, and will do nothing due to the previous call ]).and('was called twice'); done(); @@ -422,7 +422,7 @@ describe('buffered-runner', function() { runner.run( () => { expect(terminate, 'to have calls satisfying', [ - {args: [true]}, // this is the pool force-terminating + {args: []}, // this is the pool force-terminating {args: []} // this will always be called, and will do nothing due to the previous call ]).and('was called twice'); done(); @@ -482,7 +482,7 @@ describe('buffered-runner', function() { runner.run( () => { expect(terminate, 'to have calls satisfying', [ - {args: [true]}, // this is the pool force-terminating + {args: []}, // this is the pool force-terminating {args: []} // this will always be called, and will do nothing due to the previous call ]).and('was called twice'); done(); @@ -526,7 +526,7 @@ describe('buffered-runner', function() { runner.run( () => { expect(terminate, 'to have calls satisfying', [ - {args: [true]}, // this is the pool force-terminating + {args: []}, // this is the pool force-terminating {args: []} // this will always be called, and will do nothing due to the previous call ]).and('was called twice'); done(); From 6545108401c8cafa47142991a04a1df58da3342f Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 14:35:35 -0700 Subject: [PATCH 74/92] try to fix ESM test Signed-off-by: Christopher Hiller --- test/integration/options/parallel.spec.js | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 519e15d8fe..bfc440e8df 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -94,17 +94,27 @@ describe('--parallel', function() { }); describe('when used with ESM tests', function() { + var esmArgs = + Number(process.versions.node.split('.')[0]) >= 13 + ? 
[] + : ['--experimental-modules']; + before(function() { if (!utils.supportsEsModules()) this.skip(); }); it('should have the same result as with --no-parallel', function() { - this.timeout(5000); - return runMochaAsync(path.join('esm', '*.fixture.mjs'), [ - '--no-parallel' - ]).then(function(expected) { + this.timeout(Math.min(this.timeout(), 5000)); + + return runMochaAsync( + path.join('esm', '*.fixture.mjs'), + esmArgs.concat(['--no-parallel']) + ).then(function(expected) { return expect( - runMochaAsync(path.join('esm', '*.fixture.mjs'), ['--parallel']), + runMochaAsync( + path.join('esm', '*.fixture.mjs'), + esmArgs.concat(['--parallel']) + ), 'to be fulfilled with value satisfying', { passing: expected.passing, From e10b5e630564432e673966ecdaa7d43de5fd464a Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 14:36:00 -0700 Subject: [PATCH 75/92] tweak nyc --- .nycrc | 13 +++++++++++-- .travis.yml | 2 +- package-scripts.js | 4 +++- 3 files changed, 15 insertions(+), 4 deletions(-) diff --git a/.nycrc b/.nycrc index 8848fa90fd..120b7add69 100644 --- a/.nycrc +++ b/.nycrc @@ -4,7 +4,16 @@ "text-summary" ], "exclude": [ - "lib/browser", - "test/**/*" + "coverage/**", + "packages/*/test{,s}/**", + "**/*.d.ts", + "test{,s}/**", + "test{,-*}.{js,cjs,mjs,ts}", + "**/*{.,-}test.{js,cjs,mjs,ts}", + "**/__tests__/**", + "**/{karma,rollup,webpack}.config.js", + "**/{babel.config,.eslintrc,.mocharc}.{js,cjs}", + "lib/browser/**", + "package-scripts.js" ] } diff --git a/.travis.yml b/.travis.yml index 7fdae25520..eb3835315f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -36,7 +36,7 @@ cache: jobs: include: - - script: COVERAGE=1 npm start test.node + - script: NODE_DEBUG=nyc COVERAGE=1 npm start test.node after_success: npm start coveralls - &node diff --git a/package-scripts.js b/package-scripts.js index 1a79fd05c1..be5e448baf 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -10,9 +10,11 @@ const path = require('path'); * @returns {string} Command string to be executed by nps. 
*/ function test(testName, mochaParams) { - const coverageCommand = `nyc --no-clean --report-dir coverage/reports/${testName}`; + let coverageCommand = `nyc --no-clean --report-dir coverage/reports/${testName}`; const mochaCommand = `node ${path.join('bin', 'mocha')}`; // Include 'node' and path.join for Windows compatibility if (process.env.CI) { + // suppress coverage summaries in CI to reduce noise + coverageCommand += ' --reporter=json'; if (!/^only-/.test(testName)) { mochaParams += ' --forbid-only'; } From fdf9916ae1cea0b88f6bac291eac5a8a6a4e2145 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 14:57:23 -0700 Subject: [PATCH 76/92] add warning if no multiple cores Signed-off-by: Christopher Hiller --- lib/buffered-runner.js | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index cea850dbbb..abeead0af9 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -108,7 +108,12 @@ class BufferedRunner extends Runner { let pool; try { - const jobs = options.jobs || os.cpus().length - 1; + if (os.cpus().length === 1) { + process.emitWarning( + `(Mocha) only one (1) CPU core available; avoid --parallel on this machine` + ); + } + const jobs = Math.min(1, options.jobs || os.cpus().length - 1); debug('run(): starting pool with %d max workers', jobs); pool = workerpool.pool(WORKER_PATH, { workerType: 'process', From d8c7fbca61d735e8364e701229e0fb4bf18484bd Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 14:57:35 -0700 Subject: [PATCH 77/92] get some debug info Signed-off-by: Christopher Hiller --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index eb3835315f..6d0033815a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -36,7 +36,7 @@ cache: jobs: include: - - script: NODE_DEBUG=nyc COVERAGE=1 npm start test.node + - script: DEBUG=mocha:parallel* COVERAGE=1 npm start test.node after_success: npm start coveralls - &node From e23b4e21d732a3d0dfafd21619cf890b18b771db Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 15:29:39 -0700 Subject: [PATCH 78/92] fix default job count, job tests --- lib/buffered-runner.js | 5 +++-- lib/cli/run.js | 2 +- test/node-unit/buffered-runner.spec.js | 17 ++++++++++++----- 3 files changed, 16 insertions(+), 8 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index abeead0af9..3dfab43e7b 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -108,12 +108,13 @@ class BufferedRunner extends Runner { let pool; try { - if (os.cpus().length === 1) { + const cpuCount = os.cpus().length; + if (cpuCount === 1) { process.emitWarning( `(Mocha) only one (1) CPU core available; avoid --parallel on this machine` ); } - const jobs = Math.min(1, options.jobs || os.cpus().length - 1); + const jobs = Math.max(1, options.jobs || cpuCount - 1); debug('run(): starting pool with %d max workers', jobs); pool = workerpool.pool(WORKER_PATH, { workerType: 'process', diff --git a/lib/cli/run.js b/lib/cli/run.js index 2d5448f33e..0ac61038e6 100644 --- a/lib/cli/run.js +++ b/lib/cli/run.js @@ -158,7 +158,7 @@ exports.builder = yargs => defaultDescription: '(number of CPU cores - 1)', requiresArg: true, group: GROUPS.RULES, - default: Math.min(2, coreCount - 1) + default: Math.max(2, coreCount - 1) }, 'list-interfaces': { conflicts: Array.from(ONE_AND_DONE_ARGS), diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js 
index a1c0a2fc9e..e218a2f595 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -1,6 +1,5 @@ 'use strict'; -const os = require('os'); const { EVENT_RUN_BEGIN, EVENT_TEST_PASS, @@ -21,10 +20,11 @@ describe('buffered-runner', function() { let terminate; let BufferedRunner; let suite; + let cpuCount; beforeEach(function() { sandbox = createSandbox(); - + cpuCount = 1; suite = new Suite('a root suite', {}, true); // tests will want to further define the behavior of these. @@ -41,6 +41,9 @@ describe('buffered-runner', function() { BufferedRunner = rewiremock.proxy(BUFFERED_RUNNER_PATH, () => ({ workerpool: { pool + }, + os: { + cpus: sandbox.stub().callsFake(() => new Array(cpuCount)) } })); }); @@ -149,7 +152,7 @@ describe('buffered-runner', function() { args: [ expect.it('to be a', 'string'), { - maxWorkers: os.cpus().length - 1 + maxWorkers: Math.max(cpuCount - 1, 1) } ] }); @@ -161,6 +164,10 @@ describe('buffered-runner', function() { }); describe('when provided a max job count', function() { + beforeEach(function() { + cpuCount = 8; + }); + it('should use the provided max count', function(done) { runner.run( () => { @@ -168,7 +175,7 @@ describe('buffered-runner', function() { args: [ expect.it('to be a', 'string'), { - maxWorkers: 2 + maxWorkers: 4 } ] }); @@ -177,7 +184,7 @@ describe('buffered-runner', function() { { files: [], options: { - jobs: 2 + jobs: 4 } } ); From 52368a9d8dc267def6786651b34494c3d8659c91 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 15:41:19 -0700 Subject: [PATCH 79/92] fix test timeouts in slow parallel tests Signed-off-by: Christopher Hiller --- test/integration/options/parallel.spec.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index bfc440e8df..2c59f47e5c 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -7,7 +7,7 @@ var invokeMochaAsync = helpers.invokeMochaAsync; var utils = require('../../../lib/utils'); function compareReporters(reporter) { - this.timeout(5000); + this.timeout(Math.max(this.timeout(), 5000)); return runMochaAsync(path.join('options', 'parallel', 'test-a.fixture.js'), [ '--reporter', reporter, @@ -72,7 +72,7 @@ describe('--parallel', function() { describe('when used with CJS tests', function() { it('should have the same result as with --no-parallel', function() { - this.timeout(5000); + this.timeout(Math.max(this.timeout(), 5000)); return runMochaAsync( path.join('options', 'parallel', 'test-*.fixture.js'), ['--no-parallel'] @@ -264,7 +264,7 @@ describe('--parallel', function() { it('should have the same result as when run with --no-parallel', function() { // note that the output may not be in the same order, as running file // order is non-deterministic in parallel mode - this.timeout(5000); + this.timeout(Math.max(this.timeout(), 5000)); return runMochaAsync( path.join('options', 'parallel', 'test-*.fixture.js'), ['--reporter', reporter, '--no-parallel'] From a509d1c93dcde2d8795b1a17dd7ac87d26f478cd Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 15:41:32 -0700 Subject: [PATCH 80/92] reduce debug noise from worker Signed-off-by: Christopher Hiller --- lib/worker.js | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/lib/worker.js b/lib/worker.js index 0feb4b6f96..32cfaa246d 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -86,18 +86,17 @@ async 
function run(filepath, argv = {ui: 'bdd'}) { // Runner adds these; if we don't remove them, we'll get a leak. process.removeAllListeners('uncaughtException'); - debug( - 'run(): completed run with %d test failures', - typeof result.failures === 'number' ? result.failures : 0 - ); try { const serialized = serialize(result); - debug('run(): returning to main process'); + debug( + 'run(): completed run with %d test failures; returning to main process', + typeof result.failures === 'number' ? result.failures : 0 + ); resolve(serialized); } catch (err) { // TODO: figure out exactly what the sad path looks like here. // rejection should only happen if an error is "unrecoverable" - debug('run(): rejecting: %O', err); + debug('run(): serialization failed; rejecting: %O', err); reject(err); } finally { clearInterval(t); From 18f70300f41ca9129777a84fc02a50e5e75a332a Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 16:05:42 -0700 Subject: [PATCH 81/92] fix ESM parallel tests exposes `getSummary()` from integration test helpers --- test/integration/helpers.js | 2 ++ test/integration/options/parallel.spec.js | 35 ++++++++++++----------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/test/integration/helpers.js b/test/integration/helpers.js index 918b0b4198..8ebe1ae42f 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -43,6 +43,8 @@ module.exports = { invokeNode: invokeNode, + getSummary: getSummary, + /** * Resolves the path to a fixture to the full path. */ diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index 2c59f47e5c..d860e35429 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -4,6 +4,7 @@ var path = require('path'); var helpers = require('../helpers'); var runMochaAsync = helpers.runMochaAsync; var invokeMochaAsync = helpers.invokeMochaAsync; +var getSummary = helpers.getSummary; var utils = require('../../../lib/utils'); function compareReporters(reporter) { @@ -106,23 +107,23 @@ describe('--parallel', function() { it('should have the same result as with --no-parallel', function() { this.timeout(Math.min(this.timeout(), 5000)); - return runMochaAsync( - path.join('esm', '*.fixture.mjs'), - esmArgs.concat(['--no-parallel']) - ).then(function(expected) { - return expect( - runMochaAsync( - path.join('esm', '*.fixture.mjs'), - esmArgs.concat(['--parallel']) - ), - 'to be fulfilled with value satisfying', - { - passing: expected.passing, - failing: expected.failing, - pending: expected.pending, - code: expected.code - } - ); + var args = [ + path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs') + ].concat(esmArgs); + return invokeMochaAsync(args.concat('--no-parallel'))[1].then(function( + expected + ) { + var expectedSummary = getSummary(expected); + return invokeMochaAsync(args.concat('--parallel'))[1].then(function( + actual + ) { + var actualSummary = getSummary(actual); + expect(actualSummary, 'to satisfy', { + pending: expectedSummary.pending, + passing: expectedSummary.passing, + failing: expectedSummary.failing + }); + }); }); }); }); From cba2d7ef6a10c6b2588624acae54e213a7fe638c Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Thu, 16 Apr 2020 16:58:50 -0700 Subject: [PATCH 82/92] fix node arg passing to worker processes warnings about too many jobs requested; add test --- lib/buffered-runner.js | 24 +++++++++---- test/node-unit/buffered-runner.spec.js | 47 ++++++++++++++++++++++++-- 2 files changed, 
62 insertions(+), 9 deletions(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 3dfab43e7b..ca1a9fcd9b 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -9,7 +9,7 @@ const workerpool = require('workerpool'); const {deserialize} = require('./serializer'); const WORKER_PATH = require.resolve('./worker.js'); const {setInterval, clearInterval} = global; -const {createMap} = require('./utils'); +const {createMap, warn} = require('./utils'); const debugStats = pool => { const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats(); debug( @@ -109,16 +109,26 @@ class BufferedRunner extends Runner { try { const cpuCount = os.cpus().length; - if (cpuCount === 1) { - process.emitWarning( - `(Mocha) only one (1) CPU core available; avoid --parallel on this machine` + const maxJobs = cpuCount - 1; + const jobs = Math.max(1, Math.min(options.jobs || maxJobs, maxJobs)); + if (maxJobs < 2) { + warn( + `(Mocha) not enough CPU cores available (${cpuCount}) to run multiple jobs; avoid --parallel on this machine` + ); + } else if (options.jobs && options.jobs > maxJobs) { + warn( + `(Mocha) ${options.jobs} concurrent jobs requested, but only enough cores available for ${maxJobs}` ); } - const jobs = Math.max(1, options.jobs || cpuCount - 1); - debug('run(): starting pool with %d max workers', jobs); + debug( + 'run(): starting worker pool of size %d, using node args: %s', + jobs, + process.execArgv.join(' ') + ); pool = workerpool.pool(WORKER_PATH, { workerType: 'process', - maxWorkers: jobs + maxWorkers: jobs, + forkOpts: {execArgv: process.execArgv} }); sigIntListener = async () => { diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index e218a2f595..0330c62241 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -20,12 +20,14 @@ describe('buffered-runner', function() { let terminate; let BufferedRunner; let suite; + let warn; let cpuCount; beforeEach(function() { sandbox = createSandbox(); cpuCount = 1; suite = new Suite('a root suite', {}, true); + warn = sandbox.stub(); // tests will want to further define the behavior of these. 
run = sandbox.stub(); @@ -38,13 +40,14 @@ describe('buffered-runner', function() { terminate, stats: sandbox.stub().returns({}) }); - BufferedRunner = rewiremock.proxy(BUFFERED_RUNNER_PATH, () => ({ + BufferedRunner = rewiremock.proxy(BUFFERED_RUNNER_PATH, r => ({ workerpool: { pool }, os: { cpus: sandbox.stub().callsFake(() => new Array(cpuCount)) - } + }, + [require.resolve('../../lib/utils')]: r.with({warn}).callThrough() })); }); @@ -189,6 +192,46 @@ describe('buffered-runner', function() { } ); }); + + describe('when the max job count exceeds the CPU count', function() { + it('should warn', function(done) { + run.resolves({failureCount: 0, events: []}); + runner.run( + () => { + expect(warn, 'to have a call satisfying', [ + /only enough cores available/ + ]); + done(); + }, + { + files: [], + options: {jobs: 16} + } + ); + }); + }); + + describe('when there are not enough CPU cores', function() { + beforeEach(function() { + cpuCount = 2; + }); + + it('should warn', function(done) { + run.resolves({failureCount: 0, events: []}); + runner.run( + () => { + expect(warn, 'to have a call satisfying', [ + /avoid --parallel on this machine/ + ]); + done(); + }, + { + files: [], + options: {jobs: 4} + } + ); + }); + }); }); describe('when suite should bail', function() { From 4ad404fad1753e95b9dccf52cd66e34268415e8e Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 17 Apr 2020 15:30:12 -0700 Subject: [PATCH 83/92] fix string that should have been a var in BufferedRunner --- lib/buffered-runner.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index ca1a9fcd9b..56ab2c7272 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -146,7 +146,7 @@ class BufferedRunner extends Runner { } finally { process.nextTick(() => { debug('run(): imminent death'); - this._state = 'ABORTED'; + this._state = ABORTED; process.kill(process.pid, 'SIGINT'); }); } From 7e2717bd050ecfd07e87dba6e0302325d410ab3d Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 17 Apr 2020 15:31:03 -0700 Subject: [PATCH 84/92] fix --require integration test mainly fiddling with regex's. `[^]` is apparently deprecated (?), so use `[\s\S]` instead. 
--- test/integration/options/require.spec.js | 66 ++++++++++++++---------- 1 file changed, 39 insertions(+), 27 deletions(-) diff --git a/test/integration/options/require.spec.js b/test/integration/options/require.spec.js index a6c8ef7ea2..7e3204143e 100644 --- a/test/integration/options/require.spec.js +++ b/test/integration/options/require.spec.js @@ -3,8 +3,8 @@ var invokeMochaAsync = require('../helpers').invokeMochaAsync; describe('--require', function() { - describe('when run in serial', function() { - it('should allow registration of root hooks via mochaHooks object export', function() { + describe('when mocha run in serial mode', function() { + it('should run root hooks when provided via mochaHooks object export', function() { return expect( invokeMochaAsync([ '--require=' + @@ -21,15 +21,11 @@ describe('--require', function() { ])[1], 'when fulfilled', 'to contain output', - /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\n/ - ).and( - 'when fulfilled', - 'to contain output', - /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ + /beforeAll[\s\S]+?beforeAll array 1[\s\S]+?beforeAll array 2[\s\S]+?beforeEach[\s\S]+?beforeEach array 1[\s\S]+?beforeEach array 2[\s\S]+?afterEach[\s\S]+?afterEach array 1[\s\S]+?afterEach array 2[\s\S]+?afterAll[\s\S]+?afterAll array 1[\s\S]+?afterAll array 2/ ); }); - it('should allow registration of root hooks via mochaHooks function export', function() { + it('should run root hooks when provided via mochaHooks function export', function() { return expect( invokeMochaAsync([ '--require=' + @@ -46,17 +42,13 @@ describe('--require', function() { ])[1], 'when fulfilled', 'to contain output', - /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2\n/ - ).and( - 'when fulfilled', - 'to contain output', - /afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2\n/ + /beforeAll[\s\S]+?beforeAll array 1[\s\S]+?beforeAll array 2[\s\S]+?beforeEach[\s\S]+?beforeEach array 1[\s\S]+?beforeEach array 2[\s\S]+?afterEach[\s\S]+?afterEach array 1[\s\S]+?afterEach array 2[\s\S]+?afterAll[\s\S]+?afterAll array 1[\s\S]+?afterAll array 2/ ); }); }); - describe('when run with --parallel', function() { - it('should allow registration of root hooks', function() { + describe('when mocha in parallel mode', function() { + it('should run root hooks when provided via mochaHooks object exports', function() { return expect( invokeMochaAsync([ '--require=' + @@ -74,37 +66,57 @@ describe('--require', function() { ])[1], 'when fulfilled', 'to contain output', - /beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2[^]+afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2/ + /beforeAll[\s\S]+?beforeAll array 1[\s\S]+?beforeAll array 2[\s\S]+?beforeEach[\s\S]+?beforeEach array 1[\s\S]+?beforeEach array 2[\s\S]+?afterEach[\s\S]+?afterEach array 1[\s\S]+?afterEach array 2[\s\S]+?afterAll[\s\S]+?afterAll array 1[\s\S]+?afterAll array 2/ ); }); - it('should run root hooks for each job', function() { + it('should run root hooks when provided via mochaHooks function export', function() { return expect( invokeMochaAsync([ '--require=' + require.resolve( - '../fixtures/options/require/root-hook-defs-a.fixture.js' + '../fixtures/options/require/root-hook-defs-c.fixture.js' ), '--require=' + require.resolve( - 
'../fixtures/options/require/root-hook-defs-b.fixture.js' + '../fixtures/options/require/root-hook-defs-d.fixture.js' ), '--parallel', require.resolve( '../fixtures/options/require/root-hook-test.fixture.js' - ), - require.resolve( - '../fixtures/options/require/root-hook-test-2.fixture.js' ) ])[1], 'when fulfilled', 'to contain output', - /(beforeAll\nbeforeAll array 1\nbeforeAll array 2\nbeforeEach\nbeforeEach array 1\nbeforeEach array 2[^]+){2}/ - ).and( - 'when fulfilled', - 'to contain output', - /(afterEach\nafterEach array 1\nafterEach array 2\nafterAll\nafterAll array 1\nafterAll array 2[^]+){2}/ + /beforeAll[\s\S]+?beforeAll array 1[\s\S]+?beforeAll array 2[\s\S]+?beforeEach[\s\S]+?beforeEach array 1[\s\S]+?beforeEach array 2[\s\S]+?afterEach[\s\S]+?afterEach array 1[\s\S]+?afterEach array 2[\s\S]+?afterAll[\s\S]+?afterAll array 1[\s\S]+?afterAll array 2/ ); }); + + describe('when running multiple jobs', function() { + it('should run root hooks when provided via mochaHooks object exports for each job', function() { + return expect( + invokeMochaAsync([ + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-a.fixture.js' + ), + '--require=' + + require.resolve( + '../fixtures/options/require/root-hook-defs-b.fixture.js' + ), + '--parallel', + require.resolve( + '../fixtures/options/require/root-hook-test.fixture.js' + ), + require.resolve( + '../fixtures/options/require/root-hook-test-2.fixture.js' + ) + ])[1], + 'when fulfilled', + 'to contain output', + /(?:beforeAll[\s\S]+?beforeAll array 1[\s\S]+?beforeAll array 2[\s\S]+?beforeEach[\s\S]+?beforeEach array 1[\s\S]+?beforeEach array 2[\s\S]+?afterEach[\s\S]+?afterEach array 1[\s\S]+?afterEach array 2[\s\S]+?afterAll[\s\S]+?afterAll array 1[\s\S]+?afterAll array 2[\s\S]+?){2}/ + ); + }); + }); }); }); From 25d57ec0f7f20982a2823816a99dc6d2075329ae Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 17 Apr 2020 15:46:53 -0700 Subject: [PATCH 85/92] remove debugging from travis CI --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 6d0033815a..7fdae25520 100644 --- a/.travis.yml +++ b/.travis.yml @@ -36,7 +36,7 @@ cache: jobs: include: - - script: DEBUG=mocha:parallel* COVERAGE=1 npm start test.node + - script: COVERAGE=1 npm start test.node after_success: npm start coveralls - &node From e9340306e552090d39eeb716faa115acc57ed8b9 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 17 Apr 2020 16:08:21 -0700 Subject: [PATCH 86/92] add a serial-mode run (and job names?) Signed-off-by: Christopher Hiller --- .travis.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 7fdae25520..61d6c25a79 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,15 +38,22 @@ jobs: include: - script: COVERAGE=1 npm start test.node after_success: npm start coveralls + name: 'Latest Node.js (with coverage)' + + - script: MOCHA_PARALLEL=0 npm start test.node.unit + name: 'Latest Node.js (in serial mode)' - &node script: npm start test.node node_js: '12' + name: 'Node.js v12' - <<: *node node_js: '10' + name: 'Node.js v10' - script: npm start test.bundle test.browser + name: 'Browser' node_js: 12 install: npm ci # we need the native modules here addons: @@ -59,6 +66,7 @@ jobs: - stage: lint script: npm start lint + name: 'JS & Markdown' # smoke tests use default npm. 
- &smoke @@ -66,7 +74,7 @@ jobs: env: null before_install: true install: npm install --production - + name: 'Latest Node.js' script: ./bin/mocha --no-config --reporter spec test/sanity/sanity.spec.js cache: directories: @@ -75,12 +83,15 @@ jobs: - <<: *smoke node_js: '12' + name: 'Node.js v12' - <<: *smoke node_js: '10' + name: 'Node.js v10' - stage: precache script: true + name: 'Prime cache' notifications: email: false From 4328619405030bb0530f48198bf3472d76ff4b0a Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Fri, 17 Apr 2020 16:08:41 -0700 Subject: [PATCH 87/92] update package-scripts.js to respect MOCHA_PARALLEL=0 --- package-scripts.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/package-scripts.js b/package-scripts.js index be5e448baf..cdb2556d16 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -20,6 +20,10 @@ function test(testName, mochaParams) { } mochaParams += ' --color'; } + // this may _actually_ be supported in the future + if (process.env.MOCHA_PARALLEL === '0') { + mochaParams += ' --no-parallel'; + } return `${ process.env.COVERAGE ? coverageCommand : '' } ${mochaCommand} ${mochaParams}`.trim(); From 80d5c069a0ceba0d8983cfc50c980f56d927f5d3 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 20 Apr 2020 16:48:51 -0700 Subject: [PATCH 88/92] allow emoji on website via markdown-it-emoji --- .eleventy.js | 2 ++ package-lock.json | 6 ++++++ package.json | 1 + 3 files changed, 9 insertions(+) diff --git a/.eleventy.js b/.eleventy.js index 934e19bb89..3836c6999e 100644 --- a/.eleventy.js +++ b/.eleventy.js @@ -31,6 +31,8 @@ module.exports = function(eleventyConfig) { markdown.use(require('markdown-it-prism')); + markdown.use(require('markdown-it-emoji')); + eleventyConfig.setLibrary('md', markdown); return { diff --git a/package-lock.json b/package-lock.json index aa583fda7e..e9e5190681 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10436,6 +10436,12 @@ "integrity": "sha512-q45vdXU9TSWaHgFkWEFM97YHEoCmOyG9csLLdv3oVC6ARjT77u4wfng9rRtSOMb5UpxzT7zTX5GBbwm15H40dw==", "dev": true }, + "markdown-it-emoji": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/markdown-it-emoji/-/markdown-it-emoji-1.4.0.tgz", + "integrity": "sha1-m+4OmpkKljupbfaYDE/dsF37Tcw=", + "dev": true + }, "markdown-it-prism": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/markdown-it-prism/-/markdown-it-prism-2.0.5.tgz", diff --git a/package.json b/package.json index 25093d4028..3a712a4870 100644 --- a/package.json +++ b/package.json @@ -104,6 +104,7 @@ "markdown-it": "^10.0.0", "markdown-it-anchor": "^5.2.5", "markdown-it-attrs": "^3.0.2", + "markdown-it-emoji": "^1.4.0", "markdown-it-prism": "^2.0.5", "markdown-magic": "^1.0.0", "markdown-magic-package-json": "^2.0.1", From da44ea1bd67c373f1e7b9de02bfd8cc2f022587e Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 20 Apr 2020 16:49:50 -0700 Subject: [PATCH 89/92] squelch warning about missing language in a fenced code block markdownlint wants a language or fails, but prism doesn't know what to do with `text` or `plain`, so let's just forget it. 
--- .markdownlint.json | 3 ++- scripts/markdown-magic.config.js | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.markdownlint.json b/.markdownlint.json index 14a5c306e6..5eab24dc58 100644 --- a/.markdownlint.json +++ b/.markdownlint.json @@ -6,5 +6,6 @@ "first-header-h1": false, "first-line-h1": false, "commands-show-output": false, - "single-h1": false + "single-h1": false, + "fenced-code-language": false } diff --git a/scripts/markdown-magic.config.js b/scripts/markdown-magic.config.js index f75740a264..f800a5c9f0 100644 --- a/scripts/markdown-magic.config.js +++ b/scripts/markdown-magic.config.js @@ -20,7 +20,7 @@ exports.transforms = { usage: (content, options) => { const {executable} = options; const flag = options.flag || '--help'; - const header = options.header || '\n```text'; + const header = options.header || '\n```'; const footer = options.footer || '```\n'; const output = stripAnsi( String( From f5a8a761214999cc6c056c455b3145db3982c651 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 20 Apr 2020 16:50:12 -0700 Subject: [PATCH 90/92] website: add styles for a "normal" single-column ul --- docs/css/style.css | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/css/style.css b/docs/css/style.css index d4b0ab8600..39fefea91e 100644 --- a/docs/css/style.css +++ b/docs/css/style.css @@ -171,6 +171,13 @@ ul { padding: 0 15px; } +ul.single-column, +ul.single-column > li > ul { + column-count: 1; + margin-top: 0; + padding-right: 0; +} + ul li { border-bottom: 1px solid #eee; break-inside: avoid; @@ -179,6 +186,17 @@ ul li { padding: 5px 0; } +ul.single-column li, +ul.single-column li > ul > li { + list-style: disc; + padding: 0; + border-bottom: 0; +} + +ul.single-column li > ul > li { + list-style: circle; +} + code { font: 14px monaco, monospace; line-height: 1.8; From c8236d54ae79e5a9e003fb476ea8647f08f75b3c Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 20 Apr 2020 16:50:36 -0700 Subject: [PATCH 91/92] add docs for parallel mode and root hook plugins --- docs/index.md | 321 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 316 insertions(+), 5 deletions(-) diff --git a/docs/index.md b/docs/index.md index aeaa1c318c..ed3fec9d2f 100644 --- a/docs/index.md +++ b/docs/index.md @@ -19,11 +19,11 @@ Mocha is a feature-rich JavaScript test framework running on [Node.js][] and in - [browser support](#running-mocha-in-the-browser) - [simple async support, including promises](#asynchronous-code) +- [run Node.js tests in parallel](#parallel-tests) - [test coverage reporting](#wallabyjs) - [string diff support](#diffs) -- [javascript API for running tests](#more-information) -- proper exit status for CI support etc -- [auto-detects and disables coloring for non-ttys](#reporters) +- [JavaScript API for running tests](#more-information) +- [auto-detects and disables coloring for non-TTYs](#reporters) - [async test timeout support](#delayed-root-suite) - [test retry support](#retry-tests) - [test-specific timeouts](#test-level) @@ -36,7 +36,6 @@ Mocha is a feature-rich JavaScript test framework running on [Node.js][] and in - [auto-exit to prevent "hanging" with an active loop](#-exit) - [easily meta-generate suites](#markdown) & [test-cases](#list) - [config file support](#-config-path) -- clickable suite titles to filter test execution - [node debugger support](#-inspect-inspect-brk-inspect) - [node native ES modules support](#nodejs-native-esm-support) - [detects multiple calls to 
`done()`](#detects-multiple-calls-to-done) @@ -68,6 +67,8 @@ Mocha is a feature-rich JavaScript test framework running on [Node.js][] and in - [Timeouts](#timeouts) - [Diffs](#diffs) - [Command-Line Usage](#command-line-usage) +- [Parallel Tests](#parallel-tests) +- [Root-Level Hook Plugins](#root-level-hook-plugins) - [Interfaces](#interfaces) - [Reporters](#reporters) - [Node.JS native ESM support](#nodejs-native-esm-support) @@ -464,6 +465,8 @@ describe('Array', function() { Pending tests will be included in the test results, and marked as pending. A pending test is not considered a failed test. +Read the [inclusive tests section](#inclusive-tests) for an example of conditionally marking a test as pending via `this.skip()`. + ## Exclusive Tests The exclusivity feature allows you to run _only_ the specified suite or test-case @@ -820,7 +823,7 @@ Mocha supports the `err.expected` and `err.actual` properties of any thrown `Ass -```text +``` mocha [spec..] @@ -841,6 +844,10 @@ Rules & Behavior --forbid-only Fail if exclusive test(s) encountered [boolean] --forbid-pending Fail if pending test(s) encountered [boolean] --global, --globals List of allowed global variables [array] + --jobs, -j Number of concurrent jobs for --parallel; use 1 to + run in serial + [number] [default: (number of CPU cores - 1)] + --parallel, -p Run tests in parallel [boolean] --retries Retry failed tests this many times [number] --slow, -s Specify "slow" test threshold (in milliseconds) [string] [default: 75] @@ -1211,6 +1218,28 @@ All of these options are mutually exclusive. Implies `--no-timeout`. +### `--parallel, -p` + +> _New in v.8.0.0._ + +Use the `--parallel` flag to run tests in a worker pool. + +Each test file will be put into a queue and executed as workers become available. + +**NOTICE**: `--parallel` has certain implications for Mocha's behavior which you must be aware of. Read more about [running tests in parallel](#parallel-tests). + +### `--jobs , -j ` + +> _New in v.8.0.0._ + +Use `--jobs ` to specify the _maximum_ number of processes in the worker pool. + +The default value is the _number of CPU cores_ less 1. + +Hint: Use `--jobs 0` or `--jobs 1` to temporarily disable `--parallel`. + +Has no effect unless used with [`--parallel`](#-parallel-p). + ### About Option Types > _Updated in v6.0.0._ @@ -1233,6 +1262,288 @@ Prepend `--v8-` to any flag listed in the output of `node --v8-options` (excludi V8 flags can be defined in Mocha's [configuration](#configuring-mocha-nodejs). +## Parallel Tests + +> _New in v.8.0.0._ + +Depending on the number and nature of your tests, you may find a significant performance benefit when running tests in parallel (using the [`--parallel`](#-parallel-p) flag). + +Parallel tests should work out-of-the box for well-isolated unit tests. However, you must be aware of some important implications of the behavior. + +> _Note: Authors of third-party libraries built on top of Mocha should especially read this!_ + +### Reporter Limitations + +Due to the nature of the following reporters, they cannot work when running tests in parallel: + +- [`markdown`](#markdown) +- [`progress`](#progress) +- [`json-stream`](#json-stream) + {:.single-column} + +These reporters expect Mocha to know _how many tests it plans to run_ before execution. This information is unavailable in parallel mode, as test files are loaded only when they are about to be run. + +In serial mode, tests results will "stream" as they occur. 
In parallel mode, reporter output is _buffered_; reporting will occur after each file is completed. In practice, the reporter output will appear in "chunks" (but will otherwise be identical). + +### Order is Non-Deterministic + +In parallel mode, we have no guarantees about the order in which test files will be run--or what process runs them--as it depends on the execution times of the test files. + +Because of this, the following options _cannot be used_ in parallel mode: + +- [`--file`](#--file--file-directory-glob-) +- [`--sort`](#--sort) +- [`--delay`](#--delay) + {:.single-column} + +### Test Duration Variability + +Because running tests in parallel mode uses more system resources at once, the OS may take extra time to schedule and complete some operations. For this reason, test timeouts may need to be increased either [globally](#--timeout--ms----t--ms-) or [otherwise](#timeouts). + +### "Bail" is "Best Effort" + +When used with `--bail` (or `this.bail()`) to exit after the first failure, it's likely other tests will be running at the same time. Mocha must shut down its worker processes before exiting. + +Likewise, subprocesses may throw uncaught exceptions. When used with `--allow-uncaught`, Mocha will "bubble" this exception to the main process, but still must shut down its processes. + +### Root Hooks Are Not Global + +> _NOTE: This only applies to test files run parallel mode_. + +A root-level hook is a hook in a test file which is _not defined_ within a suite. An example using the `bdd` interface: + +```js +// test/setup.js +beforeEach(function() { + doMySetup(); +}); + +afterEach(function() { + doMyTeardown(); +}); +``` + +When run (in the default "serial" mode) via `mocha --file "./test/setup.js" "./test/**/*.spec.js"`, `setup.js` will be executed _first_, and install the two hooks shown above for every test found in `./test/**/*.spec.js`. + +**When Mocha runs in parallel mode, test files do not share the same process.** Consequently, a root-level hook defined in test file _A_ won't be present in test file _B_. + +There are a (minimum of) two workarounds for this: + +1. `require('./setup.js')` or `import './setup.js'` at the top of every test file. Best avoided for those averse to boilerplate. +1. _Recommended_: Define root-level hooks in a required file, using the new (also as of v8.0.0) [Root Hook Plugin](#root-level-hook-plugins) system. + +### No Browser Support + +Parallel mode is only available in Node.js. + +### Migration Checklist + +If you find your tests don't work properly when run with [`--parallel`](#-parallel-p), either shrug and move on, or use this handy-dandy checklist to get things working: + +- :white_check_mark: Ensure you are using a [supported reporter](#reporter-limitations). +- :white_check_mark: Ensure you are not using [other unsupported flags](#order-is-non-deterministic). +- :white_check_mark: Double-check your [config file](#configuring-mocha-nodejs); options set in config files will be merged with any command-line option. +- :white_check_mark: Look for root-level hooks (they look like [this](#root-hooks-are-not-global)) in your tests. Move them into a [root-level hook plugin](#root-hook-plugins). +- :white_check_mark: Do any assertion, mock, or other test libraries you're consuming use root-level hooks? They may need to be [migrated](#migrating-a-library-to-use-root-hook-plugins) for compatibility with parallel mode. 
+- :white_check_mark: If tests are unexpectedly timing out, you may need to increase the default test timeout (via [`--timeout`](#-timeout-ms-t-ms)) +- :white_check_mark: Ensure your tests do not depend on being run in a specific order. +- :white_check_mark: Ensure your tests clean up after themselves; remove temp files, handles, sockets, etc. Don't try to share state or resources between test files. + +### Caveats About Testing in Parallel + +Some types of tests are _not_ so well-suited to run in parallel. For example, extremely timing-sensitive tests, or tests which make I/O requests to a limited pool of resources (such as opening ports, or automating browser windows, hitting a test DB, or remote server, etc.). + +Free-tier cloud CI services may not provide a suitable multi-core container or VM for their build agents. Regarding expected performance gains in CI: your mileage may vary. It may help to use a conditional in a `.mocharc.js` to check for `process.env.CI`, and adjust the job count as appropriate. + +It's unlikely (but not impossible) to see a performance gain from a [job count](#-jobs-count-j-count) _greater than_ the number of available CPU cores. That said, _play around with the job count_--there's no one-size-fits all, and the unique characteristics of your tests will determine the optimal number of jobs; it may even be that fewer is faster! + +## Root Hook Plugins + +> _New in v8.0.0._ + +In some cases, you may want to execute a [hook](#hooks) before (or after) every test in every file. Previous to v8.0.0, the way to accomplish this was to use `--file` combined with root-level hooks (see [example above](#root-level-hooks-are-not-global)). This still works in v8.0.0, but _not_ when running tests in parallel mode! + +A Root-Level Hook Plugin is a JavaScript file loaded via [`--require`](#-require-module-r-module) which "registers" one or more root-level hooks to be used across all test files. + +### Defining a Root Hook Plugin + +A Root Hook Plugin file is a script which exports a `mochaHooks` property. + +Here's a simple example, which defines a hook to run before every test (in every file). Use it via `--require test/hooks.js`: + +```js +// test/hooks.js + +exports.mochaHooks = { + beforeEach(done) { + // do something before every test + done(); + } +}; +``` + +`beforeEach`--as you may have guessed--corresponds to a `beforeEach` in the default [`bdd`](#bdd) interface. This works with other interfaces too, _but the property names are always as follows_: + +- `beforeAll`: + - In **serial** mode (the default), run _once_, before any tests begin + - In **parallel** mode, run in _each file_, before any tests begin +- `beforeEach`: + - In both **serial and parallel** modes, run _before every test_ +- `afterAll`: + - In **serial** mode (the default), run _once_, after all tests end + - In **parallel** mode, run in _each file_, after all tests end +- `afterEach`: + - In both **serial and parallel** modes, run _after every test_ + +{:.single-column} + +Note that these hook callbacks run in the usual context, so `this` is available: + +```js +exports.mochaHooks = { + beforeAll() { + // skip all tests for bob + if (require('os').userInfo().username === 'bob') { + return this.skip(); + } + } +}; +``` + +### Multiple Root Hooks in a Single Plugin + +Multiple root hooks can be defined in a single plugin, for organizational purposes. 
For example: + +```js +exports.mochaHooks = { + beforeEach: [ + function(done) { + // do something before every test, + // then run the next hook in this array + }, + async function() { + // async or Promise-returning functions allowed + } + ] +}; +``` + +### Root Hook Plugins Can Export a Function + +If you need to perform some logic--such as choosing a root hook conditionally, based on the environment--`mochaHooks` can be a _function_ which returns the expected object. + +```js +exports.mochaHooks = () => { + if (process.env.CI) { + // root hooks object + return { + beforeEach: [ + function() { + // CI-specific beforeEach + }, + function() { + // some other CI-specific beforeEach + } + ] + }; + } + // root hooks object + return { + beforeEach() { + // regular beforeEach + } + }; +}; +``` + +If you need to perform an async operation, `mochaHooks` can be `Promise`-returning: + +```js +exports.mochaHooks = async () => { + const result = await checkSomething(); + // only use a root hook if `result` is truthy + if (result) { + // root hooks object + return { + beforeEach() { + // something + } + }; + } +}; +``` + +### Multiple Root Hook Plugins + +Multiple root hook plugins can be registered by using `--require` multiple times. For example, to register the root hooks in `hooks-a.js` and `hooks-b.js`, use `--require hooks-a.js --require hooks-b.js`. These will be registered (and run) _in order_. + +### Migrating Tests to use Root Hook Plugins + +To migrate your tests using root hooks to a root hook plugin: + +1. Find your root hooks (hooks defined outside of a suite--a `describe()` callback). +1. Create a new file, e.g., `test/hooks.js`. +1. _Move_ your root hooks into `test/hooks.js`. +1. In `test/hooks.js`, make your hooks a member of an exported `mochaHooks` property. +1. Use `--require test/hooks.js` (even better: use a [config file](#configuring-mocha-nodejs)) when running your tests. + +For example, given the following file, `test/test.spec.js`, containing root hooks: + +```js +// test/test.spec.js + +beforeEach(function() { + // global setup for all tests +}); + +after(function() { + // one-time final cleanup +}); + +describe('my test suite', function() { + it('should have run my global setup', function() { + // make assertion + }); +}); +``` + +Your `test/hooks.js` should contain: + +```js +// test/hooks.js + +exports.mochaHooks = { + beforeEach(function() { + // global setup for all tests + }), + afterAll(function() { + // one-time final cleanup + }) +}; +``` + +**Note that `after` becomes `afterAll` and `before` becomes `beforeAll`.** + +Your original `test/test.spec.js` should now contain: + +```js +// test/test.spec.js + +describe('my test suite', function() { + it('should have run my global setup', function() { + // make assertion + }); +}); +``` + +Running `mocha --require test/hooks.js test/test.spec.js` will run as before (and is now ready to be used with [`--parallel`](#-parallel-p)). + +### Migrating a Library to use Root Hook PLugins + +If you're a library maintainer, and your library uses root-level hooks, you can migrate by refactoring your entry point. + +Your library should _always_ export a [`mochaHooks` object](#defining-a-root-hook-plugin). To maintain backwards compatibility, run your root level hooks _if and only if_ `global.beforeEach` (or other relevant hook) exists. + ## Interfaces Mocha's "interface" system allows developers to choose their style of DSL. Mocha has **BDD**, **TDD**, **Exports**, **QUnit** and **Require**-style interfaces. 
From 0b13c92b595a765a787295a13fa132eea0b74e61 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Mon, 20 Apr 2020 17:14:46 -0700 Subject: [PATCH 92/92] fix bad anchors --- docs/index.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/docs/index.md b/docs/index.md index ed3fec9d2f..857b1567c4 100644 --- a/docs/index.md +++ b/docs/index.md @@ -68,7 +68,7 @@ Mocha is a feature-rich JavaScript test framework running on [Node.js][] and in - [Diffs](#diffs) - [Command-Line Usage](#command-line-usage) - [Parallel Tests](#parallel-tests) -- [Root-Level Hook Plugins](#root-level-hook-plugins) +- [Root Hook Plugins](#root-hook-plugins) - [Interfaces](#interfaces) - [Reporters](#reporters) - [Node.JS native ESM support](#nodejs-native-esm-support) @@ -1291,14 +1291,14 @@ In parallel mode, we have no guarantees about the order in which test files will Because of this, the following options _cannot be used_ in parallel mode: -- [`--file`](#--file--file-directory-glob-) -- [`--sort`](#--sort) -- [`--delay`](#--delay) +- [`--file`](#-file-filedirectoryglob) +- [`--sort`](#-sort-s) +- [`--delay`](#delayed-root-suite) {:.single-column} ### Test Duration Variability -Because running tests in parallel mode uses more system resources at once, the OS may take extra time to schedule and complete some operations. For this reason, test timeouts may need to be increased either [globally](#--timeout--ms----t--ms-) or [otherwise](#timeouts). +Because running tests in parallel mode uses more system resources at once, the OS may take extra time to schedule and complete some operations. For this reason, test timeouts may need to be increased either [globally](#-timeout-ms-t-ms) or [otherwise](#timeouts). ### "Bail" is "Best Effort" @@ -1330,7 +1330,7 @@ When run (in the default "serial" mode) via `mocha --file "./test/setup.js" "./t There are a (minimum of) two workarounds for this: 1. `require('./setup.js')` or `import './setup.js'` at the top of every test file. Best avoided for those averse to boilerplate. -1. _Recommended_: Define root-level hooks in a required file, using the new (also as of v8.0.0) [Root Hook Plugin](#root-level-hook-plugins) system. +1. _Recommended_: Define root-level hooks in a required file, using the new (also as of v8.0.0) [Root Hook Plugin](#root-hook-plugins) system. ### No Browser Support @@ -1361,7 +1361,7 @@ It's unlikely (but not impossible) to see a performance gain from a [job count]( > _New in v8.0.0._ -In some cases, you may want to execute a [hook](#hooks) before (or after) every test in every file. Previous to v8.0.0, the way to accomplish this was to use `--file` combined with root-level hooks (see [example above](#root-level-hooks-are-not-global)). This still works in v8.0.0, but _not_ when running tests in parallel mode! +In some cases, you may want to execute a [hook](#hooks) before (or after) every test in every file. Previous to v8.0.0, the way to accomplish this was to use `--file` combined with root-level hooks (see [example above](#root-hooks-are-not-global)). This still works in v8.0.0, but _not_ when running tests in parallel mode! A Root-Level Hook Plugin is a JavaScript file loaded via [`--require`](#-require-module-r-module) which "registers" one or more root-level hooks to be used across all test files.
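As a footnote to PATCH 73 above: its commit message describes the new `BufferedRunner` state handling as "a finite-state-machine-ish thing", implemented in the diff as a transition table plus a guarded `_state` property. A self-contained sketch of that pattern, with the state set trimmed down and no claim to being the runner's actual API:

```js
'use strict';

// allowed transitions, keyed by current state (shape mirrors the `states`
// map in lib/buffered-runner.js, but reduced for illustration)
const IDLE = 'IDLE';
const RUNNING = 'RUNNING';
const COMPLETE = 'COMPLETE';

const transitions = {
  [IDLE]: new Set([RUNNING]),
  [RUNNING]: new Set([COMPLETE]),
  [COMPLETE]: new Set()
};

// installs a `_state` property whose setter rejects illegal transitions
function defineGuardedState(obj) {
  let state = IDLE;
  Object.defineProperty(obj, '_state', {
    get() {
      return state;
    },
    set(newState) {
      if (!transitions[state].has(newState)) {
        throw new Error(`invalid state transition: ${state} => ${newState}`);
      }
      state = newState;
    }
  });
}

// usage
const runner = {};
defineGuardedState(runner);
runner._state = RUNNING; // ok
runner._state = COMPLETE; // ok
// runner._state = IDLE;  // would throw: invalid state transition
```

The benefit over a handful of boolean flags (`didAbort`, `shouldAbort`, and friends) is that an impossible combination fails loudly at the assignment site instead of silently mis-routing cleanup logic.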