diff --git a/bin/nyc.js b/bin/nyc.js index c285c5abe..99c667e0d 100755 --- a/bin/nyc.js +++ b/bin/nyc.js @@ -65,6 +65,10 @@ if ([ ), function (done) { var mainChildExitCode = process.exitCode + if (argv.showProcessTree || argv.buildProcessTree) { + nyc.writeProcessIndex() + } + if (argv.checkCoverage) { checkCoverage(argv) process.exitCode = process.exitCode || mainChildExitCode diff --git a/bin/wrap.js b/bin/wrap.js index b4c916156..c15862c47 100644 --- a/bin/wrap.js +++ b/bin/wrap.js @@ -12,6 +12,10 @@ config._processInfo = { ppid: parentPid, root: process.env.NYC_ROOT_ID } +if (process.env.NYC_PROCESSINFO_EXTERNAL_ID) { + config._processInfo.externalId = process.env.NYC_PROCESSINFO_EXTERNAL_ID + delete process.env.NYC_PROCESSINFO_EXTERNAL_ID +} ;(new NYC(config)).wrap() diff --git a/index.js b/index.js index 00e6cc81f..aaf8c21f9 100755 --- a/index.js +++ b/index.js @@ -413,6 +413,105 @@ NYC.prototype.report = function () { } } +// XXX(@isaacs) Index generation should move to istanbul-lib-processinfo +NYC.prototype.writeProcessIndex = function () { + const dir = this.processInfoDirectory() + const pidToUid = new Map() + const infoByUid = new Map() + const eidToUid = new Map() + const infos = fs.readdirSync(dir).filter(f => f !== 'index.json').map(f => { + try { + const info = JSON.parse(fs.readFileSync(path.resolve(dir, f), 'utf-8')) + // on this first read pass, also map the pids to uuids + info.uuid = path.basename(f, '.json') + pidToUid.set(info.uuid, info.pid) + pidToUid.set(info.pid, info.uuid) + infoByUid.set(info.uuid, info) + if (info.externalId) { + eidToUid.set(info.externalId, info.uuid) + } + return info + } catch (er) { + return null + } + }).filter(Boolean) + + // create all the parent-child links and write back the updated info + const needsUpdate = new Set() + infos.forEach(info => { + if (info.ppid && info.ppid !== '0' && !info.parent) { + info.parent = pidToUid.get(info.ppid) + needsUpdate.add(info) + } + if (info.parent) { + const parentInfo = 
infoByUid.get(info.parent) + if (parentInfo.children.indexOf(info.uuid) === -1) { + parentInfo.children.push(info.uuid) + needsUpdate.add(parentInfo) + } + } + }) + + // figure out which files were touched by each process. + const files = infos.reduce((files, info) => { + if (!info.files) { + try { + info.files = Object.keys(JSON.parse(fs.readFileSync( + path.resolve(this.tempDirectory(), info.coverageFilename), + 'utf-8' + ))) + } catch (er) { + return files + } + needsUpdate.add(info) + } + info.files.forEach(f => { + files[f] = files[f] || [] + files[f].push(info.uuid) + }) + return files + }, {}) + + // build the actual index! + const index = infos.reduce((index, info) => { + index.processes[info.uuid] = {} + if (info.parent) { + index.processes[info.uuid].parent = info.parent + } + if (info.externalId) { + index.processes[info.uuid].externalId = info.externalId + index.externalIds[info.externalId] = { + root: info.uuid, + children: info.children + } + } + if (info.children && info.children.length) { + index.processes[info.uuid].children = Array.from(info.children) + } + return index + }, { processes: {}, files: files, externalIds: {} }) + + // flatten the descendant sets of all the externalId procs + Object.keys(index.externalIds).forEach(eid => { + const { children } = index.externalIds[eid] + // push the next generation onto the list so we accumulate them all + for (let i = 0; i < children.length; i++) { + const nextGen = index.processes[children[i]].children + if (nextGen && nextGen.length) { + children.push(...nextGen.filter(uuid => children.indexOf(uuid) === -1)) + } + } + }) + + needsUpdate.forEach(info => { + fs.writeFileSync( + path.resolve(dir, info.uuid + '.json'), JSON.stringify(info) + ) + }) + + fs.writeFileSync(path.resolve(dir, 'index.json'), JSON.stringify(index)) +} + NYC.prototype.showProcessTree = function () { var processTree = ProcessInfo.buildProcessTree(this._loadProcessInfos()) @@ -452,7 +551,7 @@ NYC.prototype._loadProcessInfos = 
function () { var _this = this var files = fs.readdirSync(this.processInfoDirectory()) - return files.map(function (f) { + return files.filter(f => f !== 'index.json').map(function (f) { try { return new ProcessInfo(JSON.parse(fs.readFileSync( path.resolve(_this.processInfoDirectory(), f), diff --git a/lib/process.js b/lib/process.js index c77ca503f..fb62ab6a9 100644 --- a/lib/process.js +++ b/lib/process.js @@ -13,6 +13,7 @@ function ProcessInfo (defaults) { this.root = null this.coverageFilename = null this.nodes = [] // list of children, filled by buildProcessTree() + this.children = [] // just uuids, not full nodes this._coverageMap = null diff --git a/test/processinfo.js b/test/processinfo.js new file mode 100644 index 000000000..814f050d3 --- /dev/null +++ b/test/processinfo.js @@ -0,0 +1,96 @@ +const {resolve} = require('path') +const bin = resolve(__dirname, '../self-coverage/bin/nyc') +const {spawn} = require('child_process') +const t = require('tap') +const rimraf = require('rimraf') +const node = process.execPath +const fixturesCLI = resolve(__dirname, './fixtures/cli') +const tmp = 'processinfo-test' +const fs = require('fs') +const resolvedJS = resolve(fixturesCLI, 'selfspawn-fibonacci.js') + +rimraf.sync(resolve(fixturesCLI, tmp)) +t.teardown(() => rimraf.sync(resolve(fixturesCLI, tmp))) + +t.test('build some processinfo', t => { + var args = [ + bin, '-t', tmp, '--build-process-tree', + node, 'selfspawn-fibonacci.js', '5', + ] + var proc = spawn(process.execPath, args, { + cwd: fixturesCLI, + env: { + PATH: process.env.PATH, + NYC_PROCESSINFO_EXTERNAL_ID: 'blorp', + } + }) + // don't actually care about the output for this test, just the data + proc.stderr.resume() + proc.stdout.resume() + proc.on('close', (code, signal) => { + t.equal(code, 0) + t.equal(signal, null) + t.end() + }) +}) + +t.test('validate the created processinfo data', t => { + const covs = fs.readdirSync(resolve(fixturesCLI, tmp)) + .filter(f => f !== 'processinfo') + 
t.plan(covs.length * 2) + + covs.forEach(f => { + fs.readFile(resolve(fixturesCLI, tmp, f), 'utf8', (er, covjson) => { + if (er) + throw er + const covdata = JSON.parse(covjson) + t.same(Object.keys(covdata), [resolvedJS]) + // should have matching processinfo for each cov json + const procInfoFile = resolve(fixturesCLI, tmp, 'processinfo', f) + fs.readFile(procInfoFile, 'utf8', (er, procInfoJson) => { + if (er) + throw er + const procInfoData = JSON.parse(procInfoJson) + t.match(procInfoData, { + pid: /^[0-9]+$/, + ppid: /^[0-9]+$/, + uuid: f.replace(/\.json$/, ''), + argv: [ + node, + resolvedJS, + /[1-5]/, + ], + execArgv: [], + cwd: fixturesCLI, + time: Number, + root: /^[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/, + coverageFilename: resolve(fixturesCLI, tmp, f), + nodes: [], + _coverageMap: null, + files: [ resolvedJS ], + children: Array, + }) + }) + }) + }) +}) + +t.test('check out the index', t => { + const indexFile = resolve(fixturesCLI, tmp, 'processinfo', 'index.json') + const indexJson = fs.readFileSync(indexFile, 'utf-8') + const index = JSON.parse(indexJson) + const u = /^[0-9a-f]{8}-(?:[0-9a-f]{4}-){3}[0-9a-f]{12}$/ + t.match(index, { + processes: {}, + files: { + [resolvedJS]: [u, u, u, u, u, u, u, u, u ], + }, + externalIds: { + blorp: { + root: u, + children: [u, u, u, u, u, u, u, u ], + }, + }, + }) + t.end() +})