From b7c6629208cd53eb65dd8367c4a3634354b4b9a4 Mon Sep 17 00:00:00 2001
From: patak
Date: Tue, 23 Jan 2024 16:53:54 +0100
Subject: [PATCH] feat(optimizer): holdUntilCrawlEnd option (#15244)

---
 docs/config/dep-optimization-options.md       |   8 ++
 packages/vite/src/node/config.ts              |   1 +
 packages/vite/src/node/optimizer/index.ts     |  11 ++
 packages/vite/src/node/optimizer/optimizer.ts | 117 ++++++++++++++++--
 4 files changed, 124 insertions(+), 13 deletions(-)

diff --git a/docs/config/dep-optimization-options.md b/docs/config/dep-optimization-options.md
index 68ecc247b27760..f1c2de63e00fbc 100644
--- a/docs/config/dep-optimization-options.md
+++ b/docs/config/dep-optimization-options.md
@@ -62,6 +62,14 @@ Certain options are omitted since changing them would not be compatible with Vit
 
 Set to `true` to force dependency pre-bundling, ignoring previously cached optimized dependencies.
 
+## optimizeDeps.holdUntilCrawlEnd
+
+- **Experimental**
+- **Type:** `boolean`
+- **Default:** `true`
+
+When enabled, it will hold the first optimized deps results until all static imports are crawled on cold start. This avoids the need for full-page reloads when new dependencies are discovered and they trigger the generation of new common chunks. If all dependencies are found by the scanner plus the explicitly defined ones in `include`, it is better to disable this option to let the browser process more requests in parallel.
+
 ## optimizeDeps.disabled
 
 - **Deprecated**
diff --git a/packages/vite/src/node/config.ts b/packages/vite/src/node/config.ts
index c96dae1b191ba8..e910708eb7517a 100644
--- a/packages/vite/src/node/config.ts
+++ b/packages/vite/src/node/config.ts
@@ -766,6 +766,7 @@ export async function resolveConfig(
     packageCache,
     createResolver,
     optimizeDeps: {
+      holdUntilCrawlEnd: true,
       ...optimizeDeps,
       esbuildOptions: {
         preserveSymlinks: resolveOptions.preserveSymlinks,
diff --git a/packages/vite/src/node/optimizer/index.ts b/packages/vite/src/node/optimizer/index.ts
index c085b5d10b23a3..476a638ed0fd1b 100644
--- a/packages/vite/src/node/optimizer/index.ts
+++ b/packages/vite/src/node/optimizer/index.ts
@@ -132,6 +132,17 @@ export interface DepOptimizationConfig {
    * @experimental
    */
   noDiscovery?: boolean
+  /**
+   * When enabled, it will hold the first optimized deps results until all static
+   * imports are crawled on cold start. This avoids the need for full-page reloads
+   * when new dependencies are discovered and they trigger the generation of new
+   * common chunks. If all dependencies are found by the scanner plus the explicitly
+   * defined ones in `include`, it is better to disable this option to let the
+   * browser process more requests in parallel.
+   * @default true
+   * @experimental
+   */
+  holdUntilCrawlEnd?: boolean
 }
 
 export type DepOptimizationOptions = DepOptimizationConfig & {
diff --git a/packages/vite/src/node/optimizer/optimizer.ts b/packages/vite/src/node/optimizer/optimizer.ts
index 0ca16143d7b86e..721a2f45c8035c 100644
--- a/packages/vite/src/node/optimizer/optimizer.ts
+++ b/packages/vite/src/node/optimizer/optimizer.ts
@@ -93,6 +93,10 @@ async function createDepsOptimizer(
   let metadata =
     cachedMetadata || initDepsOptimizerMetadata(config, ssr, sessionTimestamp)
 
+  const options = getDepOptimizationConfig(config, ssr)
+
+  const { noDiscovery, holdUntilCrawlEnd } = options
+
   const depsOptimizer: DepsOptimizer = {
     metadata,
     registerMissingImport,
@@ -103,7 +107,7 @@
       `${depInfo.file}?v=${depInfo.browserHash}`,
     delayDepsOptimizerUntil,
     close,
-    options: getDepOptimizationConfig(config, ssr),
+    options,
   }
 
   depsOptimizerMap.set(config, depsOptimizer)
@@ -126,6 +130,23 @@
     }
   }
 
+  let discoveredDepsWhileScanning: string[] = []
+  const logDiscoveredDepsWhileScanning = () => {
+    if (discoveredDepsWhileScanning.length) {
+      config.logger.info(
+        colors.green(
+          `✨ discovered while scanning: ${depsLogString(
+            discoveredDepsWhileScanning,
+          )}`,
+        ),
+        {
+          timestamp: true,
+        },
+      )
+      discoveredDepsWhileScanning = []
+    }
+  }
+
   let depOptimizationProcessing = promiseWithResolvers<void>()
   let depOptimizationProcessingQueue: PromiseWithResolvers<void>[] = []
   const resolveEnqueuedProcessingPromises = () => {
@@ -140,6 +161,7 @@
   let currentlyProcessing = false
 
   let firstRunCalled = !!cachedMetadata
+  let warnAboutMissedDependencies = false
 
   // If this is a cold run, we wait for static imports discovered
   // from the first request before resolving to minimize full page reloads.
@@ -180,17 +202,17 @@
 
     // Initialize discovered deps with manually added optimizeDeps.include info
 
-    const deps: Record<string, string> = {}
-    await addManuallyIncludedOptimizeDeps(deps, config, ssr)
+    const manuallyIncludedDeps: Record<string, string> = {}
+    await addManuallyIncludedOptimizeDeps(manuallyIncludedDeps, config, ssr)
 
-    const discovered = toDiscoveredDependencies(
+    const manuallyIncludedDepsInfo = toDiscoveredDependencies(
       config,
-      deps,
+      manuallyIncludedDeps,
       ssr,
       sessionTimestamp,
     )
 
-    for (const depInfo of Object.values(discovered)) {
+    for (const depInfo of Object.values(manuallyIncludedDepsInfo)) {
       addOptimizedDepInfo(metadata, 'discovered', {
         ...depInfo,
         processing: depOptimizationProcessing.promise,
@@ -198,7 +220,7 @@
       newDepsDiscovered = true
     }
 
-    if (config.optimizeDeps.noDiscovery) {
+    if (noDiscovery) {
       // We don't need to scan for dependencies or wait for the static crawl to end
       // Run the first optimization run immediately
       runOptimizer()
@@ -214,6 +236,13 @@
           const deps = await discover.result
           discover = undefined
 
+          const manuallyIncluded = Object.keys(manuallyIncludedDepsInfo)
+          discoveredDepsWhileScanning.push(
+            ...Object.keys(metadata.discovered).filter(
+              (dep) => !deps[dep] && !manuallyIncluded.includes(dep),
+            ),
+          )
+
           // Add these dependencies to the discovered list, as these are currently
           // used by the preAliasPlugin to support aliased and optimized deps.
           // This is also used by the CJS externalization heuristics in legacy mode
@@ -224,12 +253,31 @@
           }
 
           const knownDeps = prepareKnownDeps()
+          startNextDiscoveredBatch()
 
           // For dev, we run the scanner and the first optimization
-          // run on the background, but we wait until crawling has ended
-          // to decide if we send this result to the browser or we need to
-          // do another optimize step
+          // run on the background
           optimizationResult = runOptimizeDeps(config, knownDeps, ssr)
+
+          // If the holdUntilCrawlEnd strategy is used, we wait until crawling has
+          // ended to decide if we send this result to the browser or we need to
+          // do another optimize step
+          if (!holdUntilCrawlEnd) {
+            // If not, we release the result to the browser as soon as the scanner
+            // is done. If the scanner missed any dependency, and a new dependency
+            // is discovered while crawling static imports, then there will be a
+            // full-page reload if new common chunks are generated between the old
+            // and new optimized deps.
+            optimizationResult.result.then((result) => {
+              // Check if the crawling of static imports has already finished. In that
+              // case, the result is handled by the onCrawlEnd callback
+              if (!crawlEndFinder) return
+
+              optimizationResult = undefined // signal that we'll be using the result
+
+              runOptimizer(result)
+            })
+          }
         } catch (e) {
           logger.error(e.stack || e.message)
         } finally {
@@ -394,6 +442,16 @@
         newDepsToLogHandle = setTimeout(() => {
           newDepsToLogHandle = undefined
           logNewlyDiscoveredDeps()
+          if (warnAboutMissedDependencies) {
+            logDiscoveredDepsWhileScanning()
+            config.logger.info(
+              colors.magenta(
+                `❗ add these dependencies to optimizeDeps.include to speed up cold start`,
+              ),
+              { timestamp: true },
+            )
+            warnAboutMissedDependencies = false
+          }
         }, 2 * debounceMs)
       } else {
         debug(
@@ -426,6 +484,16 @@
       if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle)
       newDepsToLogHandle = undefined
       logNewlyDiscoveredDeps()
+      if (warnAboutMissedDependencies) {
+        logDiscoveredDepsWhileScanning()
+        config.logger.info(
+          colors.magenta(
+            `❗ add these dependencies to optimizeDeps.include to avoid a full page reload during cold start`,
+          ),
+          { timestamp: true },
+        )
+        warnAboutMissedDependencies = false
+      }
     }
 
     logger.info(
@@ -562,7 +630,7 @@
 
   function debouncedProcessing(timeout = debounceMs) {
     // Debounced rerun, let other missing dependencies be discovered before
-    // the running next optimizeDeps
+    // the next optimizeDeps run
     enqueuedRerun = undefined
     if (debounceProcessingHandle) clearTimeout(debounceProcessingHandle)
     if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle)
@@ -593,8 +661,17 @@
     await depsOptimizer.scanProcessing
 
     if (optimizationResult && !config.optimizeDeps.noDiscovery) {
-      const result = await optimizationResult.result
-      optimizationResult = undefined
+      // In the holdUntilCrawlEnd strategy, we don't release the result of the
+      // post-scanner optimize step to the browser until we reach this point.
+      // If there are new dependencies, we do another optimize run, if not, we
+      // use the post-scanner optimize result.
+      // If holdUntilCrawlEnd is false and we reach here, it means that the
+      // scan+optimize step finished after crawl end. We follow the same
+      // process as with holdUntilCrawlEnd in this case.
+      const afterScanResult = optimizationResult.result
+      optimizationResult = undefined // signal that we'll be using the result
+
+      const result = await afterScanResult
       currentlyProcessing = false
 
       const crawlDeps = Object.keys(metadata.discovered)
@@ -649,6 +726,20 @@
         startNextDiscoveredBatch()
         runOptimizer(result)
       }
+    } else if (!holdUntilCrawlEnd) {
+      // The post-scanner optimize result has been released to the browser
+      // If new deps have been discovered, issue a regular rerun of the
+      // optimizer. A full page reload may still be avoided if the new
+      // optimize result is compatible in this case
+      if (newDepsDiscovered) {
+        debug?.(
+          colors.green(
+            `✨ new dependencies were found while crawling static imports, re-running optimizer`,
+          ),
+        )
+        warnAboutMissedDependencies = true
+        debouncedProcessing(0)
+      }
     } else {
      const crawlDeps = Object.keys(metadata.discovered)
      currentlyProcessing = false
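
Note: the snippet below is an illustrative usage sketch, not part of the patch. It shows how a project might opt out of the new default once the scanner plus `optimizeDeps.include` reliably covers all dependencies, as the docs above recommend; `some-cjs-dep` is a hypothetical placeholder.

// vite.config.ts (illustrative only)
import { defineConfig } from 'vite'

export default defineConfig({
  optimizeDeps: {
    // Release the post-scanner optimize result to the browser immediately
    // instead of holding it until the static-import crawl ends (default: true)
    holdUntilCrawlEnd: false,
    // Hypothetical example: pre-bundle a dep the scanner would otherwise miss
    include: ['some-cjs-dep'],
  },
})

With `holdUntilCrawlEnd: false`, requests for optimized deps are processed in parallel with the crawl, at the cost of a possible full-page reload if the crawl later discovers deps that change the common chunks.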
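
For reviewers, a minimal sketch of the control flow this patch adds, with assumed helper names rather than Vite's actual internals:

// Sketch: how the two strategies release the post-scanner optimize result.
// All identifiers here are assumptions for illustration.
type OptimizeResult = { hash: string }

function scheduleRelease(
  holdUntilCrawlEnd: boolean,
  optimizeRun: Promise<OptimizeResult>,
  crawlStillRunning: () => boolean,
  releaseToBrowser: (result: OptimizeResult) => void,
): void {
  if (!holdUntilCrawlEnd) {
    // Eager strategy: hand the result to the browser as soon as it is ready,
    // unless the crawl already ended (then the onCrawlEnd path owns it).
    void optimizeRun.then((result) => {
      if (crawlStillRunning()) releaseToBrowser(result)
    })
  }
  // Hold strategy (default): nothing to do here. onCrawlEnd awaits the run
  // and either releases the result or re-runs the optimizer if the crawl
  // discovered dependencies the scanner missed.
}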