From 331fca996801292f0bb3579af0cbfd7fb9bddc21 Mon Sep 17 00:00:00 2001 From: Christopher Hiller Date: Wed, 6 May 2020 16:51:53 -0700 Subject: [PATCH] add WorkerPool wrapper, fix --grep - created a wrapper around the `workerpool` module to help decouple and give us a place to do serialization before invoking the worker's `run` method - do not warn about dubious max worker count (`jobs` option) since it's not something we can be sure is an actual problem (make it debug output instead) - adds [serialize-javascript](https://npm.im/serialize-javascript) module - allows easier transmission of non-JSON-compatible objects over IPC, e.g., a `RegExp` (for `--grep`) - requires use of `eval()` to deserialize. I'm not too worried about this, but I think I need to play with it more - this avoids more custom serialization code, but is not especially helpful when serializing `Test`, `Suite` and `Hook` instances - in the integration test helper code, if we provide an _absolute path_, do not make a guess about where the fixture is when running `runMocha` or `runMochaJSON`. 
this makes globs easier to use --- .eslintrc.yml | 1 + karma.conf.js | 1 + lib/buffered-runner.js | 44 ++---- lib/pool.js | 163 ++++++++++++++++++++++ lib/test.js | 5 +- lib/worker.js | 44 ++++-- package-lock.json | 5 + package-scripts.js | 2 +- package.json | 4 +- test/integration/helpers.js | 4 +- test/integration/options/parallel.spec.js | 43 ++++-- test/node-unit/buffered-runner.spec.js | 110 ++------------- test/node-unit/pool.spec.js | 148 ++++++++++++++++++++ test/node-unit/worker.spec.js | 14 +- 14 files changed, 422 insertions(+), 166 deletions(-) create mode 100644 lib/pool.js create mode 100644 test/node-unit/pool.spec.js diff --git a/.eslintrc.yml b/.eslintrc.yml index 4310cee1bc..08d3330b45 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -31,6 +31,7 @@ overrides: - 'lib/worker.js' - 'lib/reporters/buffered.js' - 'lib/serializer.js' + - 'lib/pool.js' - 'test/reporters/buffered.spec.js' parserOptions: ecmaVersion: 2018 diff --git a/karma.conf.js b/karma.conf.js index 86975e9d48..16292c472c 100644 --- a/karma.conf.js +++ b/karma.conf.js @@ -41,6 +41,7 @@ module.exports = config => { .ignore('./lib/reporters/buffered.js') .ignore('./lib/serializer.js') .ignore('./lib/worker.js') + .ignore('./lib/pool.js') .on('bundled', (err, content) => { if (err) { throw err; diff --git a/lib/buffered-runner.js b/lib/buffered-runner.js index 56ab2c7272..539d3e3366 100644 --- a/lib/buffered-runner.js +++ b/lib/buffered-runner.js @@ -1,15 +1,17 @@ 'use strict'; const allSettled = require('promise.allsettled'); -const os = require('os'); const Runner = require('./runner'); const {EVENT_RUN_BEGIN, EVENT_RUN_END} = Runner.constants; const debug = require('debug')('mocha:parallel:buffered-runner'); -const workerpool = require('workerpool'); -const {deserialize} = require('./serializer'); -const WORKER_PATH = require.resolve('./worker.js'); +const {WorkerPool} = require('./pool'); const {setInterval, clearInterval} = global; -const {createMap, warn} = require('./utils'); 
+const {createMap} = require('./utils'); + +/** + * Outputs a debug statement with worker stats + * @param {WorkerPool} pool - Worker pool + */ const debugStats = pool => { const {totalWorkers, busyWorkers, idleWorkers, pendingTasks} = pool.stats(); debug( @@ -108,28 +110,7 @@ class BufferedRunner extends Runner { let pool; try { - const cpuCount = os.cpus().length; - const maxJobs = cpuCount - 1; - const jobs = Math.max(1, Math.min(options.jobs || maxJobs, maxJobs)); - if (maxJobs < 2) { - warn( - `(Mocha) not enough CPU cores available (${cpuCount}) to run multiple jobs; avoid --parallel on this machine` - ); - } else if (options.jobs && options.jobs > maxJobs) { - warn( - `(Mocha) ${options.jobs} concurrent jobs requested, but only enough cores available for ${maxJobs}` - ); - } - debug( - 'run(): starting worker pool of size %d, using node args: %s', - jobs, - process.execArgv.join(' ') - ); - pool = workerpool.pool(WORKER_PATH, { - workerType: 'process', - maxWorkers: jobs, - forkOpts: {execArgv: process.execArgv} - }); + pool = WorkerPool.create({maxWorkers: options.jobs}); sigIntListener = async () => { if (this._state !== ABORTING) { @@ -137,7 +118,7 @@ class BufferedRunner extends Runner { this._state = ABORTING; try { - debug('run(): shutting down %d (max) workers', jobs); + debug('run(): force-terminating worker pool'); await pool.terminate(true); } catch (err) { console.error( @@ -155,10 +136,6 @@ class BufferedRunner extends Runner { process.once('SIGINT', sigIntListener); - // the "pool proxy" object is essentially just syntactic sugar to call a - // worker's procedure as one would a regular function. 
- const poolProxy = await pool.proxy(); - debugInterval = setInterval( () => debugStats(pool), DEBUG_STATS_INTERVAL @@ -174,12 +151,11 @@ files.map(async file => { debug('run(): enqueueing test file %s', file); try { - const result = await poolProxy.run(file, options); + const {failureCount, events} = await pool.run(file, options); if (this._state === BAILED) { // short-circuit after a graceful bail return; } - const {failureCount, events} = deserialize(result); debug( 'run(): completed run of file %s; %d failures / %d events', file, diff --git a/lib/pool.js b/lib/pool.js new file mode 100644 index 0000000000..bcf77019fb --- /dev/null +++ b/lib/pool.js @@ -0,0 +1,163 @@ +'use strict'; + +const serializeJavascript = require('serialize-javascript'); +const workerpool = require('workerpool'); +const {deserialize} = require('./serializer'); +const debug = require('debug')('mocha:parallel:pool'); +const {cpus} = require('os'); +const {createInvalidArgumentTypeError} = require('./errors'); + +const WORKER_PATH = require.resolve('./worker.js'); + +/** + * A mapping of Mocha `Options` objects to serialized values. + * + * This is helpful because we tend to send the same options over and over + * over IPC. + * @type {WeakMap} + */ +let optionsCache = new WeakMap(); + +/** + * Count of CPU cores + */ +const CPU_COUNT = cpus().length; + +/** + * Default max number of workers. + * + * We are already using one core for the main process, so assume only _n - 1_ are left. + * + * This is a reasonable default, but YMMV. + */ +const DEFAULT_MAX_WORKERS = CPU_COUNT - 1; + +/** + * These options are passed into the [workerpool](https://npm.im/workerpool) module. + * @type {Partial} + */ +const WORKER_POOL_DEFAULT_OPTS = { + // use child processes, not worker threads! 
+ workerType: 'process', + // ensure the same flags sent to `node` for this `mocha` invocation are passed + // along to children + forkOpts: {execArgv: process.execArgv}, + maxWorkers: DEFAULT_MAX_WORKERS +}; + +/** + * A wrapper around a third-party worker pool implementation. + */ +class WorkerPool { + constructor(opts = WORKER_POOL_DEFAULT_OPTS) { + const maxWorkers = Math.max(1, opts.maxWorkers); + + if (maxWorkers < 2) { + debug( + 'not enough CPU cores available (%d) to run multiple jobs; avoid --parallel on this machine', + CPU_COUNT + ); + } else if (maxWorkers >= CPU_COUNT) { + debug( + '%d concurrent job(s) requested, but only %d core(s) available', + maxWorkers, + CPU_COUNT + ); + } + debug( + 'run(): starting worker pool of max size %d, using node args: %s', + maxWorkers, + process.execArgv.join(' ') + ); + + this.options = Object.assign({}, opts, {maxWorkers}); + this._pool = workerpool.pool(WORKER_PATH, this.options); + } + + /** + * Terminates all workers in the pool. + * @param {boolean} [force] - Whether to force-kill workers. By default, lets workers finish their current task before termination. + * @private + * @returns {Promise} + */ + async terminate(force = false) { + return this._pool.terminate(force); + } + + /** + * Adds a test file run to the worker pool queue for execution by a worker process. + * + * Handles serialization/deserialization. + * + * @param {string} filepath - Filepath of test + * @param {Options} [options] - Options for Mocha instance + * @private + * @returns {Promise} + */ + async run(filepath, options = {}) { + if (!filepath || typeof filepath !== 'string') { + throw createInvalidArgumentTypeError( + 'Expected a non-empty filepath', + 'filepath', + 'string' + ); + } + const serializedOptions = WorkerPool.serializeOptions(options); + const result = await this._pool.exec('run', [filepath, serializedOptions]); + return deserialize(result); + } + + /** + * Returns stats about the state of the worker processes in the pool. 
+ * + * Used for debugging. + * + * @private + */ + stats() { + return this._pool.stats(); + } + + /** + * Instantiates a {@link WorkerPool}. + */ + static create(...args) { + return new WorkerPool(...args); + } + + /** + * Given Mocha options object `opts`, serialize into a format suitable for + * transmission over IPC. + * + * @param {Options} [opts] - Mocha options + * @private + * @returns {string} Serialized options + */ + static serializeOptions(opts = {}) { + if (!optionsCache.has(opts)) { + const serialized = serializeJavascript(opts, { + unsafe: true, // this means we don't care about XSS + ignoreFunction: true // do not serialize functions + }); + optionsCache.set(opts, serialized); + debug( + 'serializeOptions(): serialized options %O to: %s', + opts, + serialized + ); + } + return optionsCache.get(opts); + } + + /** + * Resets internal cache of serialized options objects. + * + * For testing/debugging + * @private + */ + static resetOptionsCache() { + optionsCache = new WeakMap(); + } +} + +exports.WorkerPool = WorkerPool; diff --git a/lib/test.js b/lib/test.js index b171831b61..21210151eb 100644 --- a/lib/test.js +++ b/lib/test.js @@ -72,7 +72,7 @@ Test.prototype.clone = function() { }; /** - * Returns an object suitable for IPC. + * Returns a minimal object suitable for transmission over IPC. * Functions are represented by keys beginning with `$$`. 
* @returns {Object} */ @@ -93,6 +93,7 @@ Test.prototype.serialize = function serialize() { speed: this.speed, state: this.state, title: this.title, - type: this.type + type: this.type, + file: this.file }; }; diff --git a/lib/worker.js b/lib/worker.js index 32cfaa246d..dc2be66457 100644 --- a/lib/worker.js +++ b/lib/worker.js @@ -1,13 +1,12 @@ 'use strict'; -const {createInvalidArgumentTypeError} = require('./errors'); +const { + createInvalidArgumentTypeError, + createInvalidArgumentValueError +} = require('./errors'); const workerpool = require('workerpool'); const Mocha = require('./mocha'); -const { - handleRequires, - validatePlugin, - loadRootHooks -} = require('./cli/run-helpers'); +const {handleRequires, validatePlugin} = require('./cli/run-helpers'); const debug = require('debug')(`mocha:parallel:worker:${process.pid}`); const {serialize} = require('./serializer'); const {setInterval, clearInterval} = global; @@ -34,20 +33,23 @@ if (workerpool.isMainThread) { * @param {Options} argv - Command-line options */ let bootstrap = async argv => { - const rawRootHooks = handleRequires(argv.require); - rootHooks = await loadRootHooks(rawRootHooks); + handleRequires(argv.require); + // const rawRootHooks = handleRequires(argv.require); + // rootHooks = await loadRootHooks(rawRootHooks); validatePlugin(argv, 'ui', Mocha.interfaces); bootstrap = () => {}; + debug('bootstrap(): finished with args: %O', argv); }; /** * Runs a single test file in a worker thread. * @param {string} filepath - Filepath of test file - * @param {Options} [argv] - Parsed command-line options object + * @param {string} [serializedOptions] - **Serialized** options. This string will be eval'd! + * @see https://npm.im/serialize-javascript * @returns {Promise<{failures: number, events: BufferedEvent[]}>} - Test * failure count and list of events. 
*/ -async function run(filepath, argv = {ui: 'bdd'}) { +async function run(filepath, serializedOptions = '{}') { if (!filepath) { throw createInvalidArgumentTypeError( 'Expected a non-empty "filepath" argument', @@ -58,7 +60,27 @@ async function run(filepath, argv = {ui: 'bdd'}) { debug('run(): running test file %s', filepath); - const opts = Object.assign(argv, { + if (typeof serializedOptions !== 'string') { + throw createInvalidArgumentTypeError( + 'run() expects second parameter to be a string which was serialized by the `serialize-javascript` module', + 'serializedOptions', + 'string' + ); + } + let argv; + try { + // eslint-disable-next-line no-eval + argv = eval('(' + serializedOptions + ')'); + } catch (err) { + throw createInvalidArgumentValueError( + 'run() was unable to deserialize the options', + 'serializedOptions', + serializedOptions + ); + } + + debug('run(): deserialized options to %O', argv); + const opts = Object.assign({ui: 'bdd'}, argv, { // workers only use the `Buffered` reporter. reporter: BUFFERED_REPORTER_PATH, // if this was true, it would cause infinite recursion. 
diff --git a/package-lock.json b/package-lock.json index 3d18e37a3e..4e1edfcdf7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15089,6 +15089,11 @@ } } }, + "serialize-javascript": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-3.0.0.tgz", + "integrity": "sha512-skZcHYw2vEX4bw90nAr2iTTsz6x2SrHEnfxgKYmZlvJYBEZrvbKtobJWlQ20zczKb3bsHHXXTYt48zBA7ni9cw==" + }, "serve-index": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/serve-index/-/serve-index-1.9.1.tgz", diff --git a/package-scripts.js b/package-scripts.js index 1270af93a7..5236b66b9c 100644 --- a/package-scripts.js +++ b/package-scripts.js @@ -32,7 +32,7 @@ function test(testName, mochaParams) { module.exports = { scripts: { build: { - script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' --ignore './lib/buffered-runner.js' --ignore './lib/serializer.js' --ignore './lib/reporters/buffered.js' --ignore './lib/worker.js' -o mocha.js`, + script: `browserify -e browser-entry.js --plugin ./scripts/dedefine --ignore './lib/cli/*.js' --ignore "./lib/esm-utils.js" --ignore 'chokidar' --ignore 'fs' --ignore 'glob' --ignore 'path' --ignore 'supports-color' --ignore './lib/buffered-runner.js' --ignore './lib/serializer.js' --ignore './lib/reporters/buffered.js' --ignore './lib/worker.js' --ignore './lib/pool.js' -o mocha.js`, description: 'Build browser bundle' }, lint: { diff --git a/package.json b/package.json index a1292cf476..cf89c89bfe 100644 --- a/package.json +++ b/package.json @@ -67,6 +67,7 @@ "ms": "2.1.2", "object.assign": "4.1.0", "promise.allsettled": "1.0.2", + "serialize-javascript": "3.0.0", "strip-json-comments": "3.0.1", "supports-color": "7.1.0", "which": "2.0.2", @@ -164,7 +165,8 @@ "./lib/serializer.js": false, "./lib/reporters/buffered.js": false, 
"./lib/buffered-reporter.js": false, - "./lib/worker.js": false + "./lib/worker.js": false, + "./lib/pool.js": false }, "prettier": { "singleQuote": true, diff --git a/test/integration/helpers.js b/test/integration/helpers.js index 35995fa2f7..6475262443 100644 --- a/test/integration/helpers.js +++ b/test/integration/helpers.js @@ -392,7 +392,9 @@ function resolveFixturePath(fixture) { if (path.extname(fixture) !== '.js' && path.extname(fixture) !== '.mjs') { fixture += '.fixture.js'; } - return path.join('test', 'integration', 'fixtures', fixture); + return path.isAbsolute(fixture) + ? fixture + : path.join('test', 'integration', 'fixtures', fixture); } /** diff --git a/test/integration/options/parallel.spec.js b/test/integration/options/parallel.spec.js index d860e35429..269ccdac79 100644 --- a/test/integration/options/parallel.spec.js +++ b/test/integration/options/parallel.spec.js @@ -4,7 +4,6 @@ var path = require('path'); var helpers = require('../helpers'); var runMochaAsync = helpers.runMochaAsync; var invokeMochaAsync = helpers.invokeMochaAsync; -var getSummary = helpers.getSummary; var utils = require('../../../lib/utils'); function compareReporters(reporter) { @@ -107,21 +106,17 @@ describe('--parallel', function() { it('should have the same result as with --no-parallel', function() { this.timeout(Math.min(this.timeout(), 5000)); - var args = [ - path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs') - ].concat(esmArgs); - return invokeMochaAsync(args.concat('--no-parallel'))[1].then(function( + var glob = path.join(__dirname, '..', 'fixtures', 'esm', '*.fixture.mjs'); + return runMochaAsync(glob, esmArgs.concat('--no-parallel')).then(function( expected ) { - var expectedSummary = getSummary(expected); - return invokeMochaAsync(args.concat('--parallel'))[1].then(function( + return runMochaAsync(glob, esmArgs.concat('--parallel')).then(function( actual ) { - var actualSummary = getSummary(actual); - expect(actualSummary, 'to satisfy', { - pending: 
expectedSummary.pending, - passing: expectedSummary.passing, - failing: expectedSummary.failing + expect(actual, 'to satisfy', { + pending: expected.pending, + passing: expected.passing, + failing: expected.failing }); }); }); @@ -250,6 +245,30 @@ describe('--parallel', function() { }); }); + describe('when used with "grep"', function() { + it('should be equivalent to running in serial', function() { + this.timeout(Math.max(this.timeout(), 5000)); + return runMochaAsync( + path.join('options', 'parallel', 'test-*.fixture.js'), + ['--no-parallel', '--grep="suite d"'] + ).then(function(expected) { + return expect( + runMochaAsync(path.join('options', 'parallel', 'test-*.fixture.js'), [ + '--parallel', + '--grep="suite d"' + ]), + 'to be fulfilled with value satisfying', + { + passing: expected.passing, + failing: expected.failing, + pending: expected.pending, + code: expected.code + } + ); + }); + }); + }); + describe('reporter equivalence', function() { // each reporter name is duplicated; one is in all lower-case // 'base' is abstract, 'html' is browser-only, others are incompatible diff --git a/test/node-unit/buffered-runner.spec.js b/test/node-unit/buffered-runner.spec.js index 0330c62241..05371447f5 100644 --- a/test/node-unit/buffered-runner.spec.js +++ b/test/node-unit/buffered-runner.spec.js @@ -15,8 +15,8 @@ const {createSandbox} = require('sinon'); describe('buffered-runner', function() { describe('BufferedRunner', function() { let sandbox; - let pool; let run; + let WorkerPool; let terminate; let BufferedRunner; let suite; @@ -32,22 +32,21 @@ describe('buffered-runner', function() { // tests will want to further define the behavior of these. 
run = sandbox.stub(); terminate = sandbox.stub(); - - pool = sandbox.stub().returns({ - proxy: sandbox.stub().resolves({ - run - }), - terminate, - stats: sandbox.stub().returns({}) - }); + WorkerPool = { + create: sandbox.stub().returns({ + run, + terminate, + stats: sandbox.stub().returns({}) + }) + }; BufferedRunner = rewiremock.proxy(BUFFERED_RUNNER_PATH, r => ({ - workerpool: { - pool + '../../lib/pool': { + WorkerPool }, os: { cpus: sandbox.stub().callsFake(() => new Array(cpuCount)) }, - [require.resolve('../../lib/utils')]: r.with({warn}).callThrough() + '../../lib/utils': r.with({warn}).callThrough() })); }); @@ -147,93 +146,6 @@ describe('buffered-runner', function() { }); }); - describe('when not provided a max job count', function() { - it('should use a max job count based on CPU cores', function(done) { - runner.run( - () => { - expect(pool, 'to have a call satisfying', { - args: [ - expect.it('to be a', 'string'), - { - maxWorkers: Math.max(cpuCount - 1, 1) - } - ] - }); - done(); - }, - {files: [], options: {}} - ); - }); - }); - - describe('when provided a max job count', function() { - beforeEach(function() { - cpuCount = 8; - }); - - it('should use the provided max count', function(done) { - runner.run( - () => { - expect(pool, 'to have a call satisfying', { - args: [ - expect.it('to be a', 'string'), - { - maxWorkers: 4 - } - ] - }); - done(); - }, - { - files: [], - options: { - jobs: 4 - } - } - ); - }); - - describe('when the max job count exceeds the CPU count', function() { - it('should warn', function(done) { - run.resolves({failureCount: 0, events: []}); - runner.run( - () => { - expect(warn, 'to have a call satisfying', [ - /only enough cores available/ - ]); - done(); - }, - { - files: [], - options: {jobs: 16} - } - ); - }); - }); - - describe('when there are not enough CPU cores', function() { - beforeEach(function() { - cpuCount = 2; - }); - - it('should warn', function(done) { - run.resolves({failureCount: 0, events: []}); - 
runner.run( - () => { - expect(warn, 'to have a call satisfying', [ - /avoid --parallel on this machine/ - ]); - done(); - }, - { - files: [], - options: {jobs: 4} - } - ); - }); - }); - }); - describe('when suite should bail', function() { describe('when no event contains an error', function() { it('should not force-terminate', function(done) { diff --git a/test/node-unit/pool.spec.js b/test/node-unit/pool.spec.js new file mode 100644 index 0000000000..75b7d8da9c --- /dev/null +++ b/test/node-unit/pool.spec.js @@ -0,0 +1,148 @@ +'use strict'; + +const rewiremock = require('rewiremock/node'); +const {createSandbox} = require('sinon'); + +describe('class WorkerPool', function() { + let WorkerPool; + let sandbox; + let pool; + let stats; + let serializeJavascript; + let serializer; + let result; + + beforeEach(function() { + sandbox = createSandbox(); + stats = {totalWorkers: 10, busyWorkers: 8, idleWorkers: 2, pendingTasks: 3}; + result = {failures: 0, events: []}; + pool = { + terminate: sandbox.stub().resolves(), + exec: sandbox.stub().resolves(result), + stats: sandbox.stub().returns(stats) + }; + serializer = { + deserialize: sandbox.stub() + }; + + serializeJavascript = sandbox.spy(require('serialize-javascript')); + WorkerPool = rewiremock.proxy(require.resolve('../../lib/pool'), { + workerpool: { + pool: sandbox.stub().returns(pool) + }, + '../../lib/serializer': serializer, + 'serialize-javascript': serializeJavascript + }).WorkerPool; + + // reset cache + WorkerPool.resetOptionsCache(); + }); + + afterEach(function() { + sandbox.restore(); + }); + + describe('static method', function() { + describe('create()', function() { + it('should return a WorkerPool instance', function() { + expect(WorkerPool.create({foo: 'bar'}), 'to be a', WorkerPool); + }); + + describe('when passed no arguments', function() { + it('should not throw', function() { + expect(WorkerPool.create, 'not to throw'); + }); + }); + }); + + describe('serializeOptions()', function() { + 
describe('when passed no arguments', function() { + it('should not throw', function() { + expect(WorkerPool.serializeOptions, 'not to throw'); + }); + }); + + it('should return a serialized string', function() { + expect(WorkerPool.serializeOptions({foo: 'bar'}), 'to be a', 'string'); + }); + + describe('when called multiple times with the same object', function() { + it('should not perform serialization twice', function() { + const obj = {foo: 'bar'}; + WorkerPool.serializeOptions(obj); + WorkerPool.serializeOptions(obj); + expect(serializeJavascript, 'was called once'); + }); + + it('should return the same value', function() { + const obj = {foo: 'bar'}; + expect( + WorkerPool.serializeOptions(obj), + 'to be', + WorkerPool.serializeOptions(obj) + ); + }); + }); + }); + }); + + describe('constructor', function() { + it('should apply defaults', function() { + expect(new WorkerPool(), 'to satisfy', { + options: { + workerType: 'process', + forkOpts: {execArgv: process.execArgv}, + maxWorkers: expect.it('to be greater than or equal to', 1) + } + }); + }); + }); + + describe('instance method', function() { + let workerPool; + + beforeEach(function() { + workerPool = WorkerPool.create(); + }); + + describe('stats()', function() { + it('should return the object returned by `workerpool.Pool#stats`', function() { + expect(workerPool.stats(), 'to be', stats); + }); + }); + + describe('run()', function() { + describe('when passed no arguments', function() { + it('should reject', async function() { + return expect(workerPool.run(), 'to be rejected with', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + }); + + describe('when passed a non-string filepath', function() { + it('should reject', async function() { + return expect(workerPool.run(123), 'to be rejected with', { + code: 'ERR_MOCHA_INVALID_ARG_TYPE' + }); + }); + }); + + it('should serialize the options object', async function() { + await workerPool.run('file.js', {foo: 'bar'}); + + expect(pool.exec, 'to have a 
call satisfying', [ + 'run', + ['file.js', '{"foo":"bar"}'] + ]).and('was called once'); + }); + + it('should deserialize the result', async function() { + await workerPool.run('file.js', {foo: 'bar'}); + expect(serializer.deserialize, 'to have a call satisfying', [ + result + ]).and('was called once'); + }); + }); + }); +}); diff --git a/test/node-unit/worker.spec.js b/test/node-unit/worker.spec.js index 94c3ceef18..7de8b0cb1d 100644 --- a/test/node-unit/worker.spec.js +++ b/test/node-unit/worker.spec.js @@ -1,5 +1,6 @@ 'use strict'; +const serializeJavascript = require('serialize-javascript'); const {SerializableWorkerResult} = require('../../lib/serializer'); const rewiremock = require('rewiremock/node'); const {createSandbox} = require('sinon'); @@ -17,7 +18,7 @@ describe('worker', function() { sandbox.spy(process, 'removeAllListeners'); }); - describe('when run as main "thread"', function() { + describe('when run as main process', function() { it('should throw', function() { expect(() => { rewiremock.proxy(WORKER_PATH, { @@ -30,7 +31,7 @@ describe('worker', function() { }); }); - describe('when run as "worker thread"', function() { + describe('when run as worker process', function() { class MockMocha {} let serializer; let runHelpers; @@ -112,15 +113,18 @@ describe('worker', function() { }); it('should handle "--require"', async function() { - await worker.run('some-file.js', {require: 'foo'}); + await worker.run( + 'some-file.js', + serializeJavascript({require: 'foo'}) + ); expect(runHelpers.handleRequires, 'to have a call satisfying', [ 'foo' ]).and('was called once'); }); it('should handle "--ui"', async function() { - const argv = {}; - await worker.run('some-file.js', argv); + const argv = {foo: 'bar'}; + await worker.run('some-file.js', serializeJavascript(argv)); expect(runHelpers.validatePlugin, 'to have a call satisfying', [ argv,