From b1a6c84c3b55f9a79f63e1fd9bd11ac9ef151e92 Mon Sep 17 00:00:00 2001
From: Anthony Fu
Date: Wed, 13 Dec 2023 15:46:00 +0100
Subject: [PATCH] feat: log re-optimization reasons (#15339)

---
 packages/vite/src/node/optimizer/index.ts | 107 +++++++++++++++-------
 1 file changed, 75 insertions(+), 32 deletions(-)

diff --git a/packages/vite/src/node/optimizer/index.ts b/packages/vite/src/node/optimizer/index.ts
index 5b6cf3b729a7df..5a98810777a314 100644
--- a/packages/vite/src/node/optimizer/index.ts
+++ b/packages/vite/src/node/optimizer/index.ts
@@ -191,6 +191,16 @@ export interface DepOptimizationMetadata {
    * This is checked on server startup to avoid unnecessary re-bundles.
    */
   hash: string
+  /**
+   * This hash is determined by dependency lockfiles.
+   * This is checked on server startup to avoid unnecessary re-bundles.
+   */
+  lockfileHash: string
+  /**
+   * This hash is determined by user config.
+   * This is checked on server startup to avoid unnecessary re-bundles.
+   */
+  configHash: string
   /**
    * The browser hash is determined by the main hash plus additional dependencies
    * discovered at runtime. This is used to invalidate browser requests to
@@ -310,9 +320,11 @@ export function initDepsOptimizerMetadata(
   ssr: boolean,
   timestamp?: string,
 ): DepOptimizationMetadata {
-  const hash = getDepHash(config, ssr)
+  const { lockfileHash, configHash, hash } = getDepHash(config, ssr)
   return {
     hash,
+    lockfileHash,
+    configHash,
     browserHash: getOptimizedBrowserHash(hash, {}, timestamp),
     optimized: {},
     chunks: {},
@@ -363,11 +375,21 @@ export async function loadCachedDepOptimizationMetadata(
       )
     } catch (e) {}
     // hash is consistent, no need to re-bundle
-    if (cachedMetadata && cachedMetadata.hash === getDepHash(config, ssr)) {
-      log?.('Hash is consistent. Skipping. Use --force to override.')
-      // Nothing to commit or cancel as we are using the cache, we only
-      // need to resolve the processing promise so requests can move on
-      return cachedMetadata
+    if (cachedMetadata) {
+      if (cachedMetadata.lockfileHash !== getLockfileHash(config, ssr)) {
+        config.logger.info(
+          'Re-optimizing dependencies because lockfile has changed',
+        )
+      } else if (cachedMetadata.configHash !== getConfigHash(config, ssr)) {
+        config.logger.info(
+          'Re-optimizing dependencies because vite config has changed',
+        )
+      } else {
+        log?.('Hash is consistent. Skipping. Use --force to override.')
+        // Nothing to commit or cancel as we are using the cache, we only
+        // need to resolve the processing promise so requests can move on
+        return cachedMetadata
+      }
     }
   } else {
     config.logger.info('Forced re-optimization of dependencies')
@@ -417,7 +439,7 @@ export function toDiscoveredDependencies(
   timestamp?: string,
 ): Record<string, OptimizedDepInfo> {
   const browserHash = getOptimizedBrowserHash(
-    getDepHash(config, ssr),
+    getDepHash(config, ssr).hash,
     deps,
     timestamp,
   )
@@ -975,17 +997,15 @@ function parseDepsOptimizerMetadata(
   jsonMetadata: string,
   depsCacheDir: string,
 ): DepOptimizationMetadata | undefined {
-  const { hash, browserHash, optimized, chunks } = JSON.parse(
-    jsonMetadata,
-    (key: string, value: string) => {
+  const { hash, lockfileHash, configHash, browserHash, optimized, chunks } =
+    JSON.parse(jsonMetadata, (key: string, value: string) => {
       // Paths can be absolute or relative to the deps cache dir where
       // the _metadata.json is located
       if (key === 'file' || key === 'src') {
         return normalizePath(path.resolve(depsCacheDir, value))
       }
       return value
-    },
-  )
+    })
   if (
     !chunks ||
     Object.values(optimized).some((depInfo: any) => !depInfo.fileHash)
@@ -995,6 +1015,8 @@ function parseDepsOptimizerMetadata(
   }
   const metadata = {
     hash,
+    lockfileHash,
+    configHash,
     browserHash,
     optimized: {},
     discovered: {},
@@ -1029,10 +1051,13 @@ function stringifyDepsOptimizerMetadata(
   metadata: DepOptimizationMetadata,
   depsCacheDir: string,
 ) {
-  const { hash, browserHash, optimized, chunks } = metadata
+  const { hash, configHash, lockfileHash, browserHash, optimized, chunks } =
+    metadata
   return JSON.stringify(
     {
       hash,
+      configHash,
+      lockfileHash,
       browserHash,
       optimized: Object.fromEntries(
         Object.values(optimized).map(
@@ -1187,27 +1212,11 @@ const lockfileFormats = [
 })
 const lockfileNames = lockfileFormats.map((l) => l.name)
 
-export function getDepHash(config: ResolvedConfig, ssr: boolean): string {
-  const lockfilePath = lookupFile(config.root, lockfileNames)
-  let content = lockfilePath ? fs.readFileSync(lockfilePath, 'utf-8') : ''
-  if (lockfilePath) {
-    const lockfileName = path.basename(lockfilePath)
-    const { checkPatches } = lockfileFormats.find(
-      (f) => f.name === lockfileName,
-    )!
-    if (checkPatches) {
-      // Default of https://github.com/ds300/patch-package
-      const fullPath = path.join(path.dirname(lockfilePath), 'patches')
-      const stat = tryStatSync(fullPath)
-      if (stat?.isDirectory()) {
-        content += stat.mtimeMs.toString()
-      }
-    }
-  }
-  // also take config into account
+function getConfigHash(config: ResolvedConfig, ssr: boolean): string {
+  // Take config into account
   // only a subset of config options that can affect dep optimization
   const optimizeDeps = getDepOptimizationConfig(config, ssr)
-  content += JSON.stringify(
+  const content = JSON.stringify(
     {
       mode: process.env.NODE_ENV || config.mode,
       root: config.root,
@@ -1238,6 +1247,40 @@
   return getHash(content)
 }
 
+function getLockfileHash(config: ResolvedConfig, ssr: boolean): string {
+  const lockfilePath = lookupFile(config.root, lockfileNames)
+  let content = lockfilePath ? fs.readFileSync(lockfilePath, 'utf-8') : ''
+  if (lockfilePath) {
+    const lockfileName = path.basename(lockfilePath)
+    const { checkPatches } = lockfileFormats.find(
+      (f) => f.name === lockfileName,
+    )!
+    if (checkPatches) {
+      // Default of https://github.com/ds300/patch-package
+      const fullPath = path.join(path.dirname(lockfilePath), 'patches')
+      const stat = tryStatSync(fullPath)
+      if (stat?.isDirectory()) {
+        content += stat.mtimeMs.toString()
+      }
+    }
+  }
+  return getHash(content)
+}
+
+function getDepHash(
+  config: ResolvedConfig,
+  ssr: boolean,
+): { lockfileHash: string; configHash: string; hash: string } {
+  const lockfileHash = getLockfileHash(config, ssr)
+  const configHash = getConfigHash(config, ssr)
+  const hash = getHash(lockfileHash + configHash)
+  return {
+    hash,
+    lockfileHash,
+    configHash,
+  }
+}
+
 function getOptimizedBrowserHash(
   hash: string,
   deps: Record<string, string>,
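
The patch splits the previous single dependency hash into two inputs, a lockfile hash and a config hash, and derives the main hash from their concatenation so that a cache miss can be attributed to one or the other. The sketch below is a minimal, standalone illustration of that decision flow, not Vite's actual internals: CachedMetadata, reoptimizeReason, and the inline getHash helper are simplified stand-ins introduced here for illustration; only the two log messages are taken verbatim from the patch.

import { createHash } from 'node:crypto'

// Simplified stand-in hash: reduce arbitrary text to a short hex digest.
const getHash = (text: string): string =>
  createHash('sha256').update(text).digest('hex').substring(0, 8)

// Shape of the cached metadata fields this patch adds.
interface CachedMetadata {
  hash: string
  lockfileHash: string
  configHash: string
}

// Compare the partial hashes first so the caller can report *why* a
// re-bundle is needed; return undefined when the cache is still valid.
function reoptimizeReason(
  cached: CachedMetadata,
  lockfileContent: string,
  serializedConfig: string,
): string | undefined {
  const lockfileHash = getHash(lockfileContent)
  const configHash = getHash(serializedConfig)
  if (cached.lockfileHash !== lockfileHash) {
    return 'Re-optimizing dependencies because lockfile has changed'
  }
  if (cached.configHash !== configHash) {
    return 'Re-optimizing dependencies because vite config has changed'
  }
  // Combined hash (getHash(lockfileHash + configHash)) is unchanged: reuse the cache.
  return undefined
}

A caller would log the returned reason and fall through to a fresh optimization run, which mirrors what loadCachedDepOptimizationMetadata does with config.logger.info in the patch.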