From 0d40ec28fceb448e6a63713e544dd1e11668a4a7 Mon Sep 17 00:00:00 2001 From: isaacs Date: Wed, 3 Apr 2019 22:03:06 -0700 Subject: [PATCH 1/6] THIS IS A WIP COMMIT FOR REVIEW ONLY, DO NOT MERGE Docs and tests still TK. Future commit message follows. --- feat: add processinfo index, add externalId If a NYC_PROCESSINFO_EXTERNAL_ID environment variable is set, then it is saved in the processinfo as `externalId`. BREAKING CHANGE: This adds a file named 'index.json' to the .nyc_output/processinfo directory, which has a different format from the other files in this dir. Furthermore, when this file is generated, some additional helpful metadata is memoized to the processinfo json files, to minimize the cost of repeated generation. (This isn't necessarily a breaking change, but it is an update to the de facto schema for those files.) As soon as possible, index generation and process tree display should be migrated out to a new 'istanbul-lib-processinfo' library. This opens the door to add features in the v14 release family to improve support for partial/resumed test runs and file watching. - When a process is run with --clean=false and a previously seen externalId, clear away all the coverage files in the set for that externalId. - When a file is changed, a test runner can use the index to determine which tests (by externalId) ought to be re-run. --- bin/nyc.js | 4 ++ bin/wrap.js | 4 ++ index.js | 101 +++++++++++++++++++++++++++++++++++++++++++- lib/process.js | 1 + test/processinfo.js | 96 +++++++++++++++++++++++++++++++++++++++++ 5 files changed, 205 insertions(+), 1 deletion(-) create mode 100644 test/processinfo.js diff --git a/bin/nyc.js b/bin/nyc.js index 2a2ea38e2..80f97c433 100755 --- a/bin/nyc.js +++ b/bin/nyc.js @@ -65,6 +65,10 @@ if ([ ), function (done) { var mainChildExitCode = process.exitCode + if (argv.showProcessTree || argv.buildProcessTree) { + nyc.writeProcessIndex() + } + if (argv.checkCoverage) { nyc.checkCoverage({ lines: argv.lines, diff --git a/bin/wrap.js b/bin/wrap.js index cc01d23a7..b7d995ffc 100644 --- a/bin/wrap.js +++ b/bin/wrap.js @@ -10,6 +10,10 @@ config._processInfo = { ppid: process.ppid, root: process.env.NYC_ROOT_ID } +if (process.env.NYC_PROCESSINFO_EXTERNAL_ID) { + config._processInfo.externalId = process.env.NYC_PROCESSINFO_EXTERNAL_ID + delete process.env.NYC_PROCESSINFO_EXTERNAL_ID +} ;(new NYC(config)).wrap() diff --git a/index.js b/index.js index 706a67d62..006ff268a 100755 --- a/index.js +++ b/index.js @@ -412,6 +412,105 @@ NYC.prototype.report = function () { } } +// XXX(@isaacs) Index generation should move to istanbul-lib-processinfo +NYC.prototype.writeProcessIndex = function () { + const dir = this.processInfoDirectory() + const pidToUid = new Map() + const infoByUid = new Map() + const eidToUid = new Map() + const infos = fs.readdirSync(dir).filter(f => f !== 'index.json').map(f => { + try { + const info = JSON.parse(fs.readFileSync(path.resolve(dir, f), 'utf-8')) + // on thiis first read pass, also map the pids to uuids + info.uuid = path.basename(f, '.json') + pidToUid.set(info.uuid, info.pid) + pidToUid.set(info.pid, info.uuid) + infoByUid.set(info.uuid, info) + if (info.externalId) { + eidToUid.set(info.externalId, info.uuid) + } + return info + } catch (er) { + return null + } + }).filter(Boolean) + + // create all the parent-child links and write back the updated info + const needsUpdate = new Set() + infos.forEach(info => { + if (info.ppid && info.ppid !== '0' && !info.parent) { + info.parent = pidToUid.get(info.ppid) + 
needsUpdate.add(info) + } + if (info.parent) { + const parentInfo = infoByUid.get(info.parent) + if (parentInfo.children.indexOf(info.uuid) === -1) { + parentInfo.children.push(info.uuid) + needsUpdate.add(parentInfo) + } + } + }) + + // figure out which files were touched by each process. + const files = infos.reduce((files, info) => { + if (!info.files) { + try { + info.files = Object.keys(JSON.parse(fs.readFileSync( + path.resolve(this.tempDirectory(), info.coverageFilename), + 'utf-8' + ))) + } catch (er) { + return files + } + needsUpdate.add(info) + } + info.files.forEach(f => { + files[f] = files[f] || [] + files[f].push(info.uuid) + }) + return files + }, {}) + + // build the actual index! + const index = infos.reduce((index, info) => { + index.processes[info.uuid] = {} + if (info.parent) { + index.processes[info.uuid].parent = info.parent + } + if (info.externalId) { + index.processes[info.uuid].externalId = info.externalId + index.externalIds[info.externalId] = { + root: info.uuid, + children: info.children + } + } + if (info.children && info.children.length) { + index.processes[info.uuid].children = Array.from(info.children) + } + return index + }, { processes: {}, files: files, externalIds: {} }) + + // flatten the descendant sets of all the externalId procs + Object.keys(index.externalIds).forEach(eid => { + const { children } = index.externalIds[eid] + // push the next generation onto the list so we accumulate them all + for (let i = 0; i < children.length; i++) { + const nextGen = index.processes[children[i]].children + if (nextGen && nextGen.length) { + children.push(...nextGen.filter(uuid => children.indexOf(uuid) === -1)) + } + } + }) + + needsUpdate.forEach(info => { + fs.writeFileSync( + path.resolve(dir, info.uuid + '.json'), JSON.stringify(info) + ) + }) + + fs.writeFileSync(path.resolve(dir, 'index.json'), JSON.stringify(index)) +} + NYC.prototype.showProcessTree = function () { var processTree = ProcessInfo.buildProcessTree(this._loadProcessInfos()) @@ -451,7 +550,7 @@ NYC.prototype._loadProcessInfos = function () { var _this = this var files = fs.readdirSync(this.processInfoDirectory()) - return files.map(function (f) { + return files.filter(f => f !== 'index.json').map(function (f) { try { return new ProcessInfo(JSON.parse(fs.readFileSync( path.resolve(_this.processInfoDirectory(), f), diff --git a/lib/process.js b/lib/process.js index c77ca503f..fb62ab6a9 100644 --- a/lib/process.js +++ b/lib/process.js @@ -13,6 +13,7 @@ function ProcessInfo (defaults) { this.root = null this.coverageFilename = null this.nodes = [] // list of children, filled by buildProcessTree() + this.children = [] // just uuids, not full nodes this._coverageMap = null diff --git a/test/processinfo.js b/test/processinfo.js new file mode 100644 index 000000000..814f050d3 --- /dev/null +++ b/test/processinfo.js @@ -0,0 +1,96 @@ +const {resolve} = require('path') +const bin = resolve(__dirname, '../self-coverage/bin/nyc') +const {spawn} = require('child_process') +const t = require('tap') +const rimraf = require('rimraf') +const node = process.execPath +const fixturesCLI = resolve(__dirname, './fixtures/cli') +const tmp = 'processinfo-test' +const fs = require('fs') +const resolvedJS = resolve(fixturesCLI, 'selfspawn-fibonacci.js') + +rimraf.sync(resolve(fixturesCLI, tmp)) +t.teardown(() => rimraf.sync(resolve(fixturesCLI, tmp))) + +t.test('build some processinfo', t => { + var args = [ + bin, '-t', tmp, '--build-process-tree', + node, 'selfspawn-fibonacci.js', '5', + ] + var proc = 
spawn(process.execPath, args, { + cwd: fixturesCLI, + env: { + PATH: process.env.PATH, + NYC_PROCESSINFO_EXTERNAL_ID: 'blorp', + } + }) + // don't actually care about the output for this test, just the data + proc.stderr.resume() + proc.stdout.resume() + proc.on('close', (code, signal) => { + t.equal(code, 0) + t.equal(signal, null) + t.end() + }) +}) + +t.test('validate the created processinfo data', t => { + const covs = fs.readdirSync(resolve(fixturesCLI, tmp)) + .filter(f => f !== 'processinfo') + t.plan(covs.length * 2) + + covs.forEach(f => { + fs.readFile(resolve(fixturesCLI, tmp, f), 'utf8', (er, covjson) => { + if (er) + throw er + const covdata = JSON.parse(covjson) + t.same(Object.keys(covdata), [resolvedJS]) + // should have matching processinfo for each cov json + const procInfoFile = resolve(fixturesCLI, tmp, 'processinfo', f) + fs.readFile(procInfoFile, 'utf8', (er, procInfoJson) => { + if (er) + throw er + const procInfoData = JSON.parse(procInfoJson) + t.match(procInfoData, { + pid: /^[0-9]+$/, + ppid: /^[0-9]+$/, + uuid: f.replace(/\.json$/, ''), + argv: [ + node, + resolvedJS, + /[1-5]/, + ], + execArgv: [], + cwd: fixturesCLI, + time: Number, + root: /^[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/, + coverageFilename: resolve(fixturesCLI, tmp, f), + nodes: [], + _coverageMap: null, + files: [ resolvedJS ], + children: Array, + }) + }) + }) + }) +}) + +t.test('check out the index', t => { + const indexFile = resolve(fixturesCLI, tmp, 'processinfo', 'index.json') + const indexJson = fs.readFileSync(indexFile, 'utf-8') + const index = JSON.parse(indexJson) + const u = /^[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/ + t.match(index, { + processes: {}, + files: { + [resolvedJS]: [u, u, u, u, u, u, u, u, u ], + }, + externalIds: { + blorp: { + root: u, + children: [u, u, u, u, u, u, u, u ], + }, + }, + }) + t.end() +}) From 22fdb2ed0a69b237879faedcea7dc8d9ca2f3839 Mon Sep 17 00:00:00 2001 From: isaacs Date: Fri, 5 Apr 2019 18:43:08 -0700 Subject: [PATCH 2/6] SQUASH: Update process index based on review - Adds a NYC_PROCESS_ID to environment - Adds `parent` to processInfo object, a uuid referring to parent. - Rebase onto processinfo-numeric-pids branch - Avoid re-writing the processinfo/{uuid}.json files Next: - Update process tree output to rely on process index instead of duplicating effort. 
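For reference, after this change each processinfo/{uuid}.json file is written
once, by the process it describes, and looks roughly like the sketch below.
The uuids, paths, and numbers are illustrative placeholders, not real output:

    // illustrative shape of .nyc_output/processinfo/{uuid}.json
    const exampleProcessInfo = {
      uuid: 'd7fe5767-...',            // generated per process in lib/process.js
      parent: '839ae93b-...',          // uuid of the spawning process, via NYC_PROCESS_ID
      externalId: 'blorp',             // present only if NYC_PROCESSINFO_EXTERNAL_ID was set
      pid: 12345,                      // numeric, per the processinfo-numeric-pids branch
      ppid: 12344,
      argv: ['/usr/bin/node', '/path/to/script.js'],
      execArgv: [],
      cwd: '/path/to/cwd',
      time: 1554500000000,
      root: '0e1fa0f9-...',            // uuid of the root nyc invocation
      coverageFilename: '/path/to/.nyc_output/d7fe5767-....json',
      files: ['/path/to/script.js']    // keys of the coverage data this process wrote
    }
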
--- bin/wrap.js | 1 + index.js | 37 ++++++++----------------------------- lib/process.js | 14 +++++++++----- test/processinfo.js | 7 ++----- 4 files changed, 20 insertions(+), 39 deletions(-) diff --git a/bin/wrap.js b/bin/wrap.js index b7d995ffc..e2c0a8deb 100644 --- a/bin/wrap.js +++ b/bin/wrap.js @@ -8,6 +8,7 @@ config.isChildProcess = true config._processInfo = { pid: process.pid, ppid: process.ppid, + parent: process.env.NYC_PROCESS_ID, root: process.env.NYC_ROOT_ID } if (process.env.NYC_PROCESSINFO_EXTERNAL_ID) { diff --git a/index.js b/index.js index 006ff268a..84e370a6c 100755 --- a/index.js +++ b/index.js @@ -312,6 +312,7 @@ NYC.prototype._wrapExit = function () { } NYC.prototype.wrap = function (bin) { + process.env.NYC_PROCESS_ID = this.processInfo.uuid this._addRequireHooks() this._wrapExit() this._loadAdditionalModules() @@ -341,7 +342,7 @@ NYC.prototype.writeCoverageFile = function () { coverage = this.sourceMaps.remapCoverage(coverage) } - var id = this.generateUniqueID() + var id = this.processInfo.uuid var coverageFilename = path.resolve(this.tempDirectory(), id + '.json') fs.writeFileSync( @@ -355,6 +356,7 @@ NYC.prototype.writeCoverageFile = function () { } this.processInfo.coverageFilename = coverageFilename + this.processInfo.files = Object.keys(coverage) fs.writeFileSync( path.resolve(this.processInfoDirectory(), id + '.json'), @@ -421,8 +423,7 @@ NYC.prototype.writeProcessIndex = function () { const infos = fs.readdirSync(dir).filter(f => f !== 'index.json').map(f => { try { const info = JSON.parse(fs.readFileSync(path.resolve(dir, f), 'utf-8')) - // on thiis first read pass, also map the pids to uuids - info.uuid = path.basename(f, '.json') + info.children = [] pidToUid.set(info.uuid, info.pid) pidToUid.set(info.pid, info.uuid) infoByUid.set(info.uuid, info) @@ -436,34 +437,17 @@ NYC.prototype.writeProcessIndex = function () { }).filter(Boolean) // create all the parent-child links and write back the updated info - const needsUpdate = new Set() infos.forEach(info => { - if (info.ppid && info.ppid !== '0' && !info.parent) { - info.parent = pidToUid.get(info.ppid) - needsUpdate.add(info) - } if (info.parent) { const parentInfo = infoByUid.get(info.parent) if (parentInfo.children.indexOf(info.uuid) === -1) { parentInfo.children.push(info.uuid) - needsUpdate.add(parentInfo) } } }) // figure out which files were touched by each process. 
const files = infos.reduce((files, info) => { - if (!info.files) { - try { - info.files = Object.keys(JSON.parse(fs.readFileSync( - path.resolve(this.tempDirectory(), info.coverageFilename), - 'utf-8' - ))) - } catch (er) { - return files - } - needsUpdate.add(info) - } info.files.forEach(f => { files[f] = files[f] || [] files[f].push(info.uuid) @@ -478,15 +462,16 @@ NYC.prototype.writeProcessIndex = function () { index.processes[info.uuid].parent = info.parent } if (info.externalId) { + if (index.externalIds[info.externalId]) { + throw new Error(`External ID ${info.externalId} used by multiple processes`) + } index.processes[info.uuid].externalId = info.externalId index.externalIds[info.externalId] = { root: info.uuid, children: info.children } } - if (info.children && info.children.length) { - index.processes[info.uuid].children = Array.from(info.children) - } + index.processes[info.uuid].children = Array.from(info.children || []) return index }, { processes: {}, files: files, externalIds: {} }) @@ -502,12 +487,6 @@ NYC.prototype.writeProcessIndex = function () { } }) - needsUpdate.forEach(info => { - fs.writeFileSync( - path.resolve(dir, info.uuid + '.json'), JSON.stringify(info) - ) - }) - fs.writeFileSync(path.resolve(dir, 'index.json'), JSON.stringify(index)) } diff --git a/lib/process.js b/lib/process.js index fb62ab6a9..608b5ec29 100644 --- a/lib/process.js +++ b/lib/process.js @@ -1,9 +1,12 @@ const archy = require('archy') const libCoverage = require('istanbul-lib-coverage') +const uuid = require('uuid/v4') function ProcessInfo (defaults) { defaults = defaults || {} + this.uuid = null + this.parent = null this.pid = String(process.pid) this.argv = process.argv this.execArgv = process.execArgv @@ -12,14 +15,14 @@ function ProcessInfo (defaults) { this.ppid = null this.root = null this.coverageFilename = null - this.nodes = [] // list of children, filled by buildProcessTree() - this.children = [] // just uuids, not full nodes - - this._coverageMap = null for (var key in defaults) { this[key] = defaults[key] } + + if (!this.uuid) { + this.uuid = uuid() + } } Object.defineProperty(ProcessInfo.prototype, 'label', { @@ -37,7 +40,7 @@ Object.defineProperty(ProcessInfo.prototype, 'label', { }) ProcessInfo.buildProcessTree = function (infos) { - var treeRoot = new ProcessInfo({ _label: 'nyc' }) + var treeRoot = new ProcessInfo({ _label: 'nyc', nodes: [] }) var nodes = { } infos = infos.sort(function (a, b) { @@ -45,6 +48,7 @@ ProcessInfo.buildProcessTree = function (infos) { }) infos.forEach(function (p) { + p.nodes = [] nodes[p.root + ':' + p.pid] = p }) diff --git a/test/processinfo.js b/test/processinfo.js index 814f050d3..f3a7b151f 100644 --- a/test/processinfo.js +++ b/test/processinfo.js @@ -52,8 +52,8 @@ t.test('validate the created processinfo data', t => { throw er const procInfoData = JSON.parse(procInfoJson) t.match(procInfoData, { - pid: /^[0-9]+$/, - ppid: /^[0-9]+$/, + pid: Number, + ppid: Number, uuid: f.replace(/\.json$/, ''), argv: [ node, @@ -65,10 +65,7 @@ t.test('validate the created processinfo data', t => { time: Number, root: /^[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/, coverageFilename: resolve(fixturesCLI, tmp, f), - nodes: [], - _coverageMap: null, files: [ resolvedJS ], - children: Array, }) }) }) From 362cdcefa075f2b0fcf35a958108741b52ced9f8 Mon Sep 17 00:00:00 2001 From: isaacs Date: Fri, 5 Apr 2019 19:23:54 -0700 Subject: [PATCH 3/6] SQUASH: generate process tree using processinfo index Also, remove some unnecessary fields from process infos --- 
bin/wrap.js | 2 +- index.js | 28 ++++++++++++++++------------ lib/process.js | 33 +++++++++++---------------------- 3 files changed, 28 insertions(+), 35 deletions(-) diff --git a/bin/wrap.js b/bin/wrap.js index e2c0a8deb..1cff275e6 100644 --- a/bin/wrap.js +++ b/bin/wrap.js @@ -8,7 +8,7 @@ config.isChildProcess = true config._processInfo = { pid: process.pid, ppid: process.ppid, - parent: process.env.NYC_PROCESS_ID, + parent: process.env.NYC_PROCESS_ID || null, root: process.env.NYC_ROOT_ID } if (process.env.NYC_PROCESSINFO_EXTERNAL_ID) { diff --git a/index.js b/index.js index 84e370a6c..ba666ffdc 100755 --- a/index.js +++ b/index.js @@ -458,9 +458,7 @@ NYC.prototype.writeProcessIndex = function () { // build the actual index! const index = infos.reduce((index, info) => { index.processes[info.uuid] = {} - if (info.parent) { - index.processes[info.uuid].parent = info.parent - } + index.processes[info.uuid].parent = info.parent if (info.externalId) { if (index.externalIds[info.externalId]) { throw new Error(`External ID ${info.externalId} used by multiple processes`) @@ -526,19 +524,25 @@ NYC.prototype._checkCoverage = function (summary, thresholds, file) { } NYC.prototype._loadProcessInfos = function () { - var _this = this - var files = fs.readdirSync(this.processInfoDirectory()) - - return files.filter(f => f !== 'index.json').map(function (f) { + return fs.readdirSync(this.processInfoDirectory()).map(f => { + let data try { - return new ProcessInfo(JSON.parse(fs.readFileSync( - path.resolve(_this.processInfoDirectory(), f), + data = JSON.parse(fs.readFileSync( + path.resolve(this.processInfoDirectory(), f), 'utf-8' - ))) + )) } catch (e) { // handle corrupt JSON output. - return {} + return null } - }) + if (f !== 'index.json') { + data.nodes = [] + data = new ProcessInfo(data) + } + return { file: path.basename(f, '.json'), data: data } + }).filter(Boolean).reduce((infos, info) => { + infos[info.file] = info.data + return infos + }, {}) } NYC.prototype.eachReport = function (filenames, iterator, baseDirectory) { diff --git a/lib/process.js b/lib/process.js index 608b5ec29..b96af5111 100644 --- a/lib/process.js +++ b/lib/process.js @@ -40,30 +40,19 @@ Object.defineProperty(ProcessInfo.prototype, 'label', { }) ProcessInfo.buildProcessTree = function (infos) { - var treeRoot = new ProcessInfo({ _label: 'nyc', nodes: [] }) - var nodes = { } - - infos = infos.sort(function (a, b) { - return a.time - b.time - }) - - infos.forEach(function (p) { - p.nodes = [] - nodes[p.root + ':' + p.pid] = p - }) - - infos.forEach(function (p) { - if (!p.ppid) { - return + const treeRoot = new ProcessInfo({ _label: 'nyc', nodes: [] }) + const index = infos.index + for (const id in index.processes) { + const node = infos[id] + if (!node) { + throw new Error(`Invalid entry in processinfo index: ${id}`) } - - var parent = nodes[p.root + ':' + p.ppid] - if (!parent) { - parent = treeRoot + const idx = index.processes[id] + node.nodes = idx.children.map(id => infos[id]) + if (!node.parent) { + treeRoot.nodes.push(node) } - - parent.nodes.push(p) - }) + } return treeRoot } From 4c27fbfa36cd3aa2b203ce798f5e9cc668af493b Mon Sep 17 00:00:00 2001 From: isaacs Date: Fri, 5 Apr 2019 20:28:27 -0700 Subject: [PATCH 4/6] Sort children when creating tree, for consistency --- lib/process.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/process.js b/lib/process.js index b96af5111..3a8cab8f7 100644 --- a/lib/process.js +++ b/lib/process.js @@ -48,7 +48,7 @@ ProcessInfo.buildProcessTree = function 
(infos) { throw new Error(`Invalid entry in processinfo index: ${id}`) } const idx = index.processes[id] - node.nodes = idx.children.map(id => infos[id]) + node.nodes = idx.children.map(id => infos[id]).sort((a, b) => a.time - b.time) if (!node.parent) { treeRoot.nodes.push(node) } From 6fa4332694467bb3659d2a6635b914383777c29d Mon Sep 17 00:00:00 2001 From: isaacs Date: Fri, 5 Apr 2019 20:32:58 -0700 Subject: [PATCH 5/6] make the linter happy --- test/processinfo.js | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/test/processinfo.js b/test/processinfo.js index f3a7b151f..7117beea6 100644 --- a/test/processinfo.js +++ b/test/processinfo.js @@ -1,6 +1,6 @@ -const {resolve} = require('path') +const { resolve } = require('path') const bin = resolve(__dirname, '../self-coverage/bin/nyc') -const {spawn} = require('child_process') +const { spawn } = require('child_process') const t = require('tap') const rimraf = require('rimraf') const node = process.execPath @@ -15,13 +15,13 @@ t.teardown(() => rimraf.sync(resolve(fixturesCLI, tmp))) t.test('build some processinfo', t => { var args = [ bin, '-t', tmp, '--build-process-tree', - node, 'selfspawn-fibonacci.js', '5', + node, 'selfspawn-fibonacci.js', '5' ] var proc = spawn(process.execPath, args, { cwd: fixturesCLI, env: { PATH: process.env.PATH, - NYC_PROCESSINFO_EXTERNAL_ID: 'blorp', + NYC_PROCESSINFO_EXTERNAL_ID: 'blorp' } }) // don't actually care about the output for this test, just the data @@ -41,15 +41,17 @@ t.test('validate the created processinfo data', t => { covs.forEach(f => { fs.readFile(resolve(fixturesCLI, tmp, f), 'utf8', (er, covjson) => { - if (er) + if (er) { throw er + } const covdata = JSON.parse(covjson) t.same(Object.keys(covdata), [resolvedJS]) // should have matching processinfo for each cov json const procInfoFile = resolve(fixturesCLI, tmp, 'processinfo', f) fs.readFile(procInfoFile, 'utf8', (er, procInfoJson) => { - if (er) + if (er) { throw er + } const procInfoData = JSON.parse(procInfoJson) t.match(procInfoData, { pid: Number, @@ -58,14 +60,14 @@ t.test('validate the created processinfo data', t => { argv: [ node, resolvedJS, - /[1-5]/, + /[1-5]/ ], execArgv: [], cwd: fixturesCLI, time: Number, root: /^[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/, coverageFilename: resolve(fixturesCLI, tmp, f), - files: [ resolvedJS ], + files: [ resolvedJS ] }) }) }) @@ -80,14 +82,14 @@ t.test('check out the index', t => { t.match(index, { processes: {}, files: { - [resolvedJS]: [u, u, u, u, u, u, u, u, u ], + [resolvedJS]: [ u, u, u, u, u, u, u, u, u ] }, externalIds: { blorp: { root: u, - children: [u, u, u, u, u, u, u, u ], - }, - }, + children: [ u, u, u, u, u, u, u, u ] + } + } }) t.end() }) From 967bcbf024d4c9c2f0aac7684b8908ae1a834b25 Mon Sep 17 00:00:00 2001 From: isaacs Date: Sat, 6 Apr 2019 12:49:03 -0700 Subject: [PATCH 6/6] remove unreachable code nit --- index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.js b/index.js index ba666ffdc..2502c17a3 100755 --- a/index.js +++ b/index.js @@ -469,7 +469,7 @@ NYC.prototype.writeProcessIndex = function () { children: info.children } } - index.processes[info.uuid].children = Array.from(info.children || []) + index.processes[info.uuid].children = Array.from(info.children) return index }, { processes: {}, files: files, externalIds: {} })
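
For reference, the shape of the index.json produced by writeProcessIndex, plus
a minimal sketch of the re-run use case described in the first commit message.
The names tempDir and changedFile below are hypothetical inputs used only for
illustration; nothing in this series defines them:

    const fs = require('fs')
    const path = require('path')

    // index.json shape, as built by writeProcessIndex:
    //   processes:   { [uuid]: { parent, children: [uuid, ...], externalId? } }
    //   files:       { [absolute path]: [uuid, ...] }
    //   externalIds: { [externalId]: { root: uuid, children: [all descendant uuids] } }

    const tempDir = '.nyc_output'                        // hypothetical
    const changedFile = '/full/path/to/changed-file.js'  // hypothetical

    const index = JSON.parse(fs.readFileSync(
      path.resolve(tempDir, 'processinfo', 'index.json'), 'utf-8'
    ))

    // walk up the parent chain from a process until one carrying an
    // externalId is found; externalId is only recorded on the process
    // that saw NYC_PROCESSINFO_EXTERNAL_ID in its environment
    const externalIdFor = uuid => {
      for (let p = index.processes[uuid]; p; p = index.processes[p.parent]) {
        if (p.externalId) {
          return p.externalId
        }
      }
      return null
    }

    // externalIds whose runs touched the changed file, i.e. the tests a
    // runner would want to re-run
    const affected = new Set(
      (index.files[changedFile] || []).map(externalIdFor).filter(Boolean)
    )
    console.log([...affected])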