Skip to content

Commit

Permalink
BREAKING CHANGE: assign env in worker instead of main process (#1391)
Browse files Browse the repository at this point in the history
  • Loading branch information
antfu committed May 29, 2022
1 parent 64bee41 commit b7e3377
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 58 deletions.
20 changes: 0 additions & 20 deletions packages/vitest/src/node/cli-api.ts
@@ -1,4 +1,3 @@
import { execa } from 'execa'
import type { UserConfig as ViteUserConfig } from 'vite'
import type { UserConfig } from '../types'
import { ensurePackageInstalled } from '../utils'
Expand Down Expand Up @@ -30,28 +29,11 @@ export async function startVitest(cliFilters: string[], options: CliOptions, vit

const ctx = await createVitest(options, viteOverrides)

process.env.VITEST_MODE = ctx.config.watch ? 'WATCH' : 'RUN'

if (ctx.config.env)
Object.assign(process.env, ctx.config.env)

if (ctx.config.coverage.enabled) {
if (!await ensurePackageInstalled('c8')) {
process.exitCode = 1
return false
}

if (!process.env.NODE_V8_COVERAGE) {
process.env.NODE_V8_COVERAGE = ctx.config.coverage.tempDirectory
// with threads enabled, tests will run inside worker threads,
// so there is no need to restart Vitest
if (!ctx.config.threads) {
await ctx.server.close()
const { exitCode } = await execa(process.argv0, process.argv.slice(1), { stdio: 'inherit', reject: false })
process.exitCode = exitCode
return false
}
}
}

if (ctx.config.environment && ctx.config.environment !== 'node') {
Expand All @@ -64,8 +46,6 @@ export async function startVitest(cliFilters: string[], options: CliOptions, vit
if (process.stdin.isTTY && ctx.config.watch)
registerConsoleShortcuts(ctx)

process.chdir(ctx.config.root)

ctx.onServerRestarted(() => {
// TODO: re-consider how to re-run the tests the server smartly
ctx.start(cliFilters)
Expand Down
75 changes: 37 additions & 38 deletions packages/vitest/src/node/pool.ts
Expand Up @@ -27,7 +27,7 @@ export function createPool(ctx: Vitest): WorkerPool {

const options: TinypoolOptions = {
filename: workerPath,
// TODO: investigate futher
// TODO: investigate further
// It seems atomics introduced V8 Fatal Error https://github.com/vitest-dev/vitest/issues/1191
useAtomics: false,

Expand All @@ -46,50 +46,49 @@ export function createPool(ctx: Vitest): WorkerPool {
options.minThreads = 1
}

if (ctx.config.coverage)
process.env.NODE_V8_COVERAGE ||= ctx.config.coverage.tempDirectory

options.env = {
TEST: 'true',
VITEST: 'true',
NODE_ENV: ctx.config.mode || 'test',
VITEST_MODE: ctx.config.watch ? 'WATCH' : 'RUN',
...process.env,
...ctx.config.env,
}

const pool = new Tinypool(options)

const runWithFiles = (name: string): RunWithFiles => {
return async (files, invalidates) => {
let id = 0
const config = ctx.getSerializableConfig()
let id = 0
const config = ctx.getSerializableConfig()

async function runFiles(files: string[], invalidates: string[] = []) {
const { workerPort, port } = createChannel(ctx)
const data: WorkerContext = {
port: workerPort,
config,
files,
invalidates,
id: ++id,
}
try {
await pool.run(data, { transferList: [workerPort], name })
}
finally {
port.close()
workerPort.close()
}
}

return async (files, invalidates) => {
if (!ctx.config.threads) {
const { workerPort, port } = createChannel(ctx)
const data: WorkerContext = {
port: workerPort,
config,
files,
invalidates,
id: ++id,
}
try {
await pool.run(data, { transferList: [workerPort], name })
}
finally {
port.close()
workerPort.close()
}
await runFiles(files)
}
else {
const results = await Promise.allSettled(files.map(async (file) => {
const { workerPort, port } = createChannel(ctx)

const data: WorkerContext = {
port: workerPort,
config,
files: [file],
invalidates,
id: ++id,
}

try {
await pool.run(data, { transferList: [workerPort], name })
}
finally {
port.close()
workerPort.close()
}
}))
const results = await Promise.allSettled(files
.map(file => runFiles([file], invalidates)))

const errors = results.filter((r): r is PromiseRejectedResult => r.status === 'rejected').map(r => r.reason)
if (errors.length > 0)
Expand Down

0 comments on commit b7e3377

Please sign in to comment.