/**
 * @license
 * Copyright Google LLC All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.dev/license
 */

import assert from 'node:assert';
import { rollup } from 'rollup';
import {
  BuildOutputFile,
  BuildOutputFileType,
  BundleContextResult,
  InitialFileRecord,
} from '../../tools/esbuild/bundler-context';
import { createOutputFile } from '../../tools/esbuild/utils';
import { assertIsError } from '../../utils/error';

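/**
 * Optimizes the browser JavaScript chunks of a build result by re-bundling the
 * esbuild-generated chunks with Rollup, starting from the main browser entrypoint.
 * Chunks consumed during re-bundling are replaced by the merged output, and the
 * initial file records are updated to reflect the new chunk graph.
 *
 * @param original The bundling result to optimize. Returned unmodified if the build
 *   failed or no browser main entrypoint is present.
 * @param sourcemap Sourcemap generation mode for the optimized chunks; `'hidden'`
 *   emits maps without a sourceMappingURL comment.
 * @returns A promise resolving to the optimized bundling result.
 */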
export async function optimizeChunks(
  original: BundleContextResult,
  sourcemap: boolean | 'hidden',
): Promise<BundleContextResult> {
  // Failed builds cannot be optimized
  if (original.errors) {
    return original;
  }

  // Find the main browser entrypoint
  let mainFile;
  for (const [file, record] of original.initialFiles) {
    if (
      record.name === 'main' &&
      record.entrypoint &&
      !record.serverFile &&
      record.type === 'script'
    ) {
      mainFile = file;
      break;
    }
  }

  // No action required if no browser main entrypoint
  if (!mainFile) {
    return original;
  }

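  // Index the browser JavaScript chunks and their sourcemaps by output path so
  // the Rollup plugin below can serve them from memory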
  const chunks: Record<string, BuildOutputFile> = {};
  const maps: Record<string, BuildOutputFile> = {};
  for (const originalFile of original.outputFiles) {
    if (originalFile.type !== BuildOutputFileType.Browser) {
      continue;
    }

    if (originalFile.path.endsWith('.js')) {
      chunks[originalFile.path] = originalFile;
    } else if (originalFile.path.endsWith('.js.map')) {
      // Create mapping of JS file to sourcemap content
      maps[originalFile.path.slice(0, -4)] = originalFile;
    }
  }

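  // Paths of the original chunks that are merged into the optimized output;
  // these are removed from the result once bundling succeeds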
  const usedChunks = new Set<string>();

  let bundle;
  let optimizedOutput;
  try {
    bundle = await rollup({
      input: mainFile,
      plugins: [
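        // Resolves and loads module requests from the in-memory chunk map built above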
        {
          name: 'angular-bundle',
          resolveId(source) {
            // Remove leading `./` if present
            const file = source[0] === '.' && source[1] === '/' ? source.slice(2) : source;

            if (chunks[file]) {
              return file;
            }

            // All other identifiers are considered external to maintain behavior
            return { id: source, external: true };
          },
          load(id) {
            assert(
              chunks[id],
              `Angular chunk content should always be present in chunk optimizer [${id}].`,
            );

            usedChunks.add(id);

            const result = {
              code: chunks[id].text,
              map: maps[id]?.text,
            };

            return result;
          },
        },
      ],
    });

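    // Generate the merged chunk output in memory; `compact` minimizes the
    // Rollup-generated wrapper code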
    const result = await bundle.generate({
      compact: true,
      sourcemap,
      chunkFileNames(chunkInfo) {
        // Do not add hash to file name if already present
        return /-[a-zA-Z0-9]{8}$/.test(chunkInfo.name) ? '[name].js' : '[name]-[hash].js';
      },
    });
    optimizedOutput = result.output;
  } catch (e) {
    assertIsError(e);

    return {
      errors: [
        // Most of these fields are not actually needed for printing the error
        {
          id: '',
          text: 'Chunk optimization failed',
          detail: undefined,
          pluginName: '',
          location: null,
          notes: [
            {
              text: e.message,
              location: null,
            },
          ],
        },
      ],
      warnings: original.warnings,
    };
  } finally {
    await bundle?.close();
  }

  // Remove used chunks and associated sourcemaps from the original result
  original.outputFiles = original.outputFiles.filter(
    (file) =>
      !usedChunks.has(file.path) &&
      !(file.path.endsWith('.map') && usedChunks.has(file.path.slice(0, -4))),
  );

  // Add new optimized chunks
  const importsPerFile: Record<string, string[]> = {};
  for (const optimizedFile of optimizedOutput) {
    if (optimizedFile.type !== 'chunk') {
      continue;
    }

    importsPerFile[optimizedFile.fileName] = optimizedFile.imports;

    original.outputFiles.push(
      createOutputFile(optimizedFile.fileName, optimizedFile.code, BuildOutputFileType.Browser),
    );
    if (optimizedFile.map && optimizedFile.sourcemapFileName) {
      original.outputFiles.push(
        createOutputFile(
          optimizedFile.sourcemapFileName,
          optimizedFile.map.toString(),
          BuildOutputFileType.Browser,
        ),
      );
    }
  }

  // Update initial files to reflect optimized chunks
  const entriesToAnalyze: [string, InitialFileRecord][] = [];
  for (const usedFile of usedChunks) {
    // Leave the main file since its information did not change
    if (usedFile === mainFile) {
      // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
      entriesToAnalyze.push([mainFile, original.initialFiles.get(mainFile)!]);
      continue;
    }

    // Remove all other used chunks
    original.initialFiles.delete(usedFile);
  }

  // Analyze for transitive initial files
  let currentEntry;
  while ((currentEntry = entriesToAnalyze.pop())) {
    const [entryPath, entryRecord] = currentEntry;

    for (const importPath of importsPerFile[entryPath]) {
      const existingRecord = original.initialFiles.get(importPath);
      if (existingRecord) {
        // Keep the smallest depth value for files reachable from multiple entries
        if (existingRecord.depth > entryRecord.depth + 1) {
          existingRecord.depth = entryRecord.depth + 1;
        }

        continue;
      }

      const record: InitialFileRecord = {
        type: 'script',
        entrypoint: false,
        external: false,
        serverFile: false,
        depth: entryRecord.depth + 1,
      };

      // Record the new initial file; this also prevents re-analyzing imports
      // shared by multiple chunks
      original.initialFiles.set(importPath, record);
      entriesToAnalyze.push([importPath, record]);
    }
  }

  return original;
}
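
// Example usage (hypothetical sketch): an application build might run the
// optimizer on the esbuild bundling result before writing output files.
// `bundlerContext.bundle()` is an assumed producer of a `BundleContextResult`
// and is not part of this file.
//
//   const result = await bundlerContext.bundle();
//   const optimized = await optimizeChunks(result, 'hidden');
//   if (optimized.errors) {
//     // Surface the bundling/optimization diagnostics
//   }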