run optimized dependency install after build
FredKSchott committed Jun 20, 2020
1 parent 56c13c2 commit b1210da
Showing 3 changed files with 97 additions and 86 deletions.
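In short, this commit stops installing dependencies into a separate BUILD_DEPENDENCIES_DIR before the build and instead installs an optimized set of dependencies after the source files are built, based on what the built output actually imports. The sketch below is a simplified reading of the new flow in src/commands/build.ts; runBuildWorkers and resolveImportsAndWrite are hypothetical stand-ins for inline code in the command, and the signatures are illustrative rather than the real exports.

import path from 'path';

// [outPath, code, sourceFileLoc] — mirrors the allBuiltJsFiles tuples in the diff.
type BuiltJsFile = [string, string, string];

declare function runBuildWorkers(): Promise<BuiltJsFile[]>;
declare function installOptimizedDependencies(
  builtFiles: [string, string][],
  installDest: string,
  commandOptions: unknown,
): Promise<{success: boolean; hasError?: boolean; importMap: {imports: Record<string, string>}}>;
declare function resolveImportsAndWrite(
  builtFiles: BuiltJsFile[],
  importMap: {imports: Record<string, string>},
): Promise<void>;

async function buildFlowSketch(
  buildDirectoryLoc: string,
  webModulesUrl: string,
  commandOptions: unknown,
) {
  // 1. Run the build/mount workers; built JS is held in memory instead of
  //    being written to disk right away.
  const allBuiltJsFiles = await runBuildWorkers();

  // 2. Scan that output for imports and install only the dependencies it
  //    actually uses, straight into the build directory (the separate
  //    BUILD_DEPENDENCIES_DIR cache is gone).
  const installDest = path.join(buildDirectoryLoc, webModulesUrl);
  const installResult = await installOptimizedDependencies(
    allBuiltJsFiles.map(([outPath, code]) => [outPath, code] as [string, string]),
    installDest,
    commandOptions,
  );
  if (!installResult.success || installResult.hasError) {
    process.exit(1);
  }

  // 3. Rewrite bare import specifiers in the held-back JS against the fresh
  //    import map, then write the final files to disk.
  await resolveImportsAndWrite(allBuiltJsFiles, installResult.importMap);
}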
170 changes: 92 additions & 78 deletions src/commands/build.ts
@@ -10,7 +10,8 @@ import path from 'path';
import rimraf from 'rimraf';
import {BuildScript} from '../config';
import {transformEsmImports} from '../rewrite-imports';
import {BUILD_DEPENDENCIES_DIR, CommandOptions, ImportMap} from '../util';
import {printStats} from '../stats-formatter';
import {CommandOptions} from '../util';
import {
generateEnvModule,
getFileBuilderForWorker,
@@ -20,42 +21,44 @@ import {
} from './build-util';
import {stopEsbuild} from './esbuildPlugin';
import {createImportResolver} from './import-resolver';
import {command as installCommand} from './install';
import {getInstallTargets, run as installRunner} from './install';
import {paint} from './paint';
import srcFileExtensionMapping from './src-file-extension-mapping';

export async function command(commandOptions: CommandOptions) {
const {cwd, config} = commandOptions;

// Start with a fresh install of your dependencies, for production
const installCommandOptions = merge(commandOptions, {
config: {
installOptions: {
dest: BUILD_DEPENDENCIES_DIR,
env: {NODE_ENV: process.env.NODE_ENV || 'production'},
treeshake: config.installOptions.treeshake ?? true,
},
async function installOptimizedDependencies(
allBuiltJsFiles: [string, string][],
installDest: string,
commandOptions: CommandOptions,
) {
console.log('optimizing dependencies...');
const installConfig = merge(commandOptions.config, {
installOptions: {
dest: installDest,
env: {NODE_ENV: process.env.NODE_ENV || 'production'},
treeshake: commandOptions.config.installOptions.treeshake ?? true,
},
});
const dependencyImportMapLoc = path.join(config.installOptions.dest, 'import-map.json');

// Start with a fresh install of your dependencies, always.
console.log(chalk.yellow('! rebuilding dependencies...'));
await installCommand(installCommandOptions);
// 1. Scan imports from your final built JS files
const installTargets = await getInstallTargets(installConfig, allBuiltJsFiles);
// 2. Install dependencies, based on the scan of your final build
const installResult = await installRunner(
{...commandOptions, config: installConfig},
installTargets,
);
return installResult;
}

export async function command(commandOptions: CommandOptions) {
const {cwd, config} = commandOptions;
const messageBus = new EventEmitter();
const relevantWorkers: BuildScript[] = [];
const allBuildExtensions: string[] = [];

let dependencyImportMap: ImportMap = {imports: {}};
try {
dependencyImportMap = require(dependencyImportMapLoc);
} catch (err) {
// no import-map found, safe to ignore
}

for (const workerConfig of config.scripts) {
const {type, match} = workerConfig;
const {id, type, match} = workerConfig;
if (id === 'mount:web_modules') {
continue;
}
if (type === 'build' || type === 'run' || type === 'mount' || type === 'bundle') {
relevantWorkers.push(workerConfig);
}
@@ -65,6 +68,7 @@ export async function command(commandOptions: CommandOptions) {
}

let bundleWorker = config.scripts.find((s) => s.type === 'bundle');
let installWorker = config.scripts.find((s) => s.id === 'mount:web_modules')!;
const isBundledHardcoded = config.devOptions.bundle !== undefined;
const isBundled = isBundledHardcoded ? !!config.devOptions.bundle : !!bundleWorker;
if (!bundleWorker) {
@@ -170,6 +174,9 @@ export async function command(commandOptions: CommandOptions) {
const mountDirDetails: any[] = relevantWorkers
.map((scriptConfig) => {
const {id, type, args} = scriptConfig;
if (id === 'mount:web_modules') {
return false;
}
if (type !== 'mount') {
return false;
}
@@ -180,13 +187,12 @@ export async function command(commandOptions: CommandOptions) {
.filter(Boolean);

const includeFileSets: [string, string, string[]][] = [];
const allProxiedFiles = new Set<string>();
for (const [id, dirDisk, dirDest] of mountDirDetails) {
messageBus.emit('WORKER_UPDATE', {id, state: ['RUNNING', 'yellow']});
let allFiles;
try {
allFiles = glob.sync(`**/*`, {
ignore: id === 'mount:web_modules' ? [] : config.exclude,
ignore: config.exclude,
cwd: dirDisk,
absolute: true,
nodir: true,
@@ -197,12 +203,11 @@ export async function command(commandOptions: CommandOptions) {
allFiles.map(async (f) => {
f = path.resolve(f); // this is necessary since glob.sync() returns paths with / on windows. path.resolve() will switch them to the native path separator.
if (
!f.startsWith(commandOptions.config.installOptions.dest) &&
(allBuildExtensions.includes(path.extname(f).substr(1)) ||
path.extname(f) === '.jsx' ||
path.extname(f) === '.tsx' ||
path.extname(f) === '.ts' ||
path.extname(f) === '.js')
allBuildExtensions.includes(path.extname(f).substr(1)) ||
path.extname(f) === '.jsx' ||
path.extname(f) === '.tsx' ||
path.extname(f) === '.ts' ||
path.extname(f) === '.js'
) {
allBuildNeededFiles.push(f);
return;
@@ -227,9 +232,8 @@ export async function command(commandOptions: CommandOptions) {
}
}

const webModulesScript = config.scripts.find((script) => script.id === 'mount:web_modules')!;
const webModulesPath = webModulesScript.args.toUrl;
const allBuiltFromFiles = new Set<string>();
const allBuiltJsFiles: [string, string, string][] = [];
for (const workerConfig of relevantWorkers) {
const {id, match, type} = workerConfig;
if (type !== 'build' || match.length === 0) {
@@ -276,63 +280,69 @@ export async function command(commandOptions: CommandOptions) {
continue;
}

allBuiltFromFiles.add(fileLoc);
if (path.extname(outPath) === '.js') {
if (resources?.css) {
const cssOutPath = outPath.replace(/.js$/, '.css');
await fs.mkdir(path.dirname(cssOutPath), {recursive: true});
await fs.writeFile(cssOutPath, resources.css);
code = `import './${path.basename(cssOutPath)}';\n` + code;
}
const resolveImportSpecifier = createImportResolver({
fileLoc,
webModulesPath,
dependencyImportMap,
isDev: false,
isBundled,
config,
});
code = await transformEsmImports(code, (spec) => {
// Try to resolve the specifier to a known URL in the project
const resolvedImportUrl = resolveImportSpecifier(spec);
if (resolvedImportUrl) {
// We treat ".proxy.js" files special: we need to make sure that they exist on disk
// in the final build, so we mark them to be written to disk at the next step.
if (resolvedImportUrl.endsWith('.proxy.js')) {
allProxiedFiles.add(
resolvedImportUrl.startsWith('/')
? path.resolve(cwd, spec)
: path.resolve(path.dirname(outPath), spec),
);
}
return resolvedImportUrl;
}
// If that fails, return a placeholder import and attempt to resolve.
let [missingPackageName, ...deepPackagePathParts] = spec.split('/');
if (missingPackageName.startsWith('@')) {
missingPackageName += '/' + deepPackagePathParts.shift();
}
messageBus.emit('MISSING_WEB_MODULE', {
id: fileLoc,
data: {
spec: spec,
pkgName: missingPackageName,
},
});
// Sort of lazy, but we expect "MISSING_WEB_MODULE" to exit the build with an error.
// So, just return the original import here since it will never be seen.
return spec;
});
code = wrapImportMeta({code, env: true, hmr: false, config});
allBuiltJsFiles.push([outPath, code, fileLoc]);
} else {
await fs.mkdir(path.dirname(outPath), {recursive: true});
await fs.writeFile(outPath, code);
}
await fs.mkdir(path.dirname(outPath), {recursive: true});
await fs.writeFile(outPath, code);
allBuiltFromFiles.add(fileLoc);
}
}
messageBus.emit('WORKER_COMPLETE', {id, error: null});
}

stopEsbuild();

const webModulesPath = installWorker.args.toUrl;
const installDest = path.join(buildDirectoryLoc, webModulesPath);
const installResult = await installOptimizedDependencies(
(allBuiltJsFiles as any) as [string, string][],
installDest,
commandOptions,
);
if (!installResult.success || installResult.hasError) {
process.exit();
}

const allProxiedFiles = new Set<string>();
for (const [outLoc, code, fileLoc] of allBuiltJsFiles) {
const resolveImportSpecifier = createImportResolver({
fileLoc,
webModulesPath,
dependencyImportMap: installResult.importMap,
isDev: false,
isBundled,
config,
});
const resolvedCode = await transformEsmImports(code, (spec) => {
// Try to resolve the specifier to a known URL in the project
const resolvedImportUrl = resolveImportSpecifier(spec);
if (resolvedImportUrl) {
// We treat ".proxy.js" files special: we need to make sure that they exist on disk
// in the final build, so we mark them to be written to disk at the next step.
if (resolvedImportUrl.endsWith('.proxy.js')) {
allProxiedFiles.add(
resolvedImportUrl.startsWith('/')
? path.resolve(cwd, spec)
: path.resolve(path.dirname(outLoc), spec),
);
}
return resolvedImportUrl;
}
return spec;
});
await fs.mkdir(path.dirname(outLoc), {recursive: true});
await fs.writeFile(outLoc, resolvedCode);
}

for (const proxiedFileLoc of allProxiedFiles) {
const proxiedCode = await fs.readFile(proxiedFileLoc, {encoding: 'utf8'});
const proxiedExt = path.extname(proxiedFileLoc);
@@ -354,6 +364,10 @@ export async function command(commandOptions: CommandOptions) {
await fs.writeFile(proxyFileLoc, proxyCode, {encoding: 'utf8'});
}

if (installResult.stats) {
console.log(printStats(installResult.stats));
}

if (!isBundled) {
messageBus.emit('WORKER_COMPLETE', {id: bundleWorker.id, error: null});
messageBus.emit('WORKER_UPDATE', {
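The key change in the rewrite step is that bare specifiers are now resolved against the import map returned by the post-build install, rather than an import-map.json read from a pre-built dependency cache. A conceptual sketch of that mapping follows (not the actual createImportResolver code; the shape of the import map entries is an assumption):

// Conceptual sketch only: the real logic lives in createImportResolver() and
// also covers relative paths and the ".proxy.js" handling shown above.
type ImportMap = {imports: Record<string, string>};

function resolveBareSpecifier(
  spec: string,
  importMap: ImportMap,
  webModulesUrl: string, // e.g. '/web_modules', taken from the mount:web_modules args.toUrl
): string {
  const mapped = importMap.imports[spec];
  if (!mapped) {
    return spec; // unknown specifier: left unchanged, as in the loop above
  }
  return `${webModulesUrl}/${mapped.replace(/^\.\//, '')}`;
}

// resolveBareSpecifier('preact', {imports: {preact: './preact.js'}}, '/web_modules')
//   -> '/web_modules/preact.js'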
12 changes: 5 additions & 7 deletions src/config.ts
@@ -8,7 +8,7 @@ import path from 'path';
import {Plugin as RollupPlugin} from 'rollup';
import yargs from 'yargs-parser';
import {esbuildPlugin} from './commands/esbuildPlugin';
import {BUILD_DEPENDENCIES_DIR, DEV_DEPENDENCIES_DIR} from './util';
import {DEV_DEPENDENCIES_DIR} from './util';

const CONFIG_NAME = 'snowpack';
const ALWAYS_EXCLUDE = ['**/node_modules/**/*', '**/.types/**/*'];
@@ -315,9 +315,7 @@ function handleLegacyProxyScripts(config: any) {
}

type RawScripts = Record<string, string>;
function normalizeScripts(cwd: string, scripts: RawScripts): BuildScript[] {
const dependenciesLoc =
process.env.NODE_ENV === 'production' ? BUILD_DEPENDENCIES_DIR : DEV_DEPENDENCIES_DIR;
export function normalizeScripts(cwd: string, scripts: RawScripts): BuildScript[] {
const processedScripts: BuildScript[] = [];
if (Object.keys(scripts).filter((k) => k.startsWith('bundle:')).length > 1) {
handleConfigError(`scripts can only contain 1 script of type "bundle:".`);
@@ -360,9 +358,9 @@ function normalizeScripts(cwd: string, scripts: RawScripts): BuildScript[] {
const dirUrl = to || `/${cmdArr[0]}`;

// mount:web_modules is a special case script where the fromDisk
// arg is harcoded to match the internal dependency dir
// arg is hard-coded to match the internal dependency directory.
if (scriptId === 'mount:web_modules') {
dirDisk = dependenciesLoc;
dirDisk = DEV_DEPENDENCIES_DIR;
}

newScriptConfig.args = {
@@ -394,7 +392,7 @@ function normalizeScripts(cwd: string, scripts: RawScripts): BuildScript[] {
match: ['web_modules'],
cmd: `mount $WEB_MODULES --to /web_modules`,
args: {
fromDisk: dependenciesLoc,
fromDisk: DEV_DEPENDENCIES_DIR,
toUrl: '/web_modules',
},
});
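With BUILD_DEPENDENCIES_DIR gone, normalizeScripts() no longer switches the web_modules source directory on NODE_ENV. Based on the hunks above, the default mount:web_modules entry now always looks roughly like this (a sketch, not a verbatim copy of the generated object):

import {DEV_DEPENDENCIES_DIR} from './util';

// Roughly what normalizeScripts() now produces for the default web_modules
// mount (simplified; the exact object shape follows BuildScript in config.ts).
const defaultWebModulesMount = {
  id: 'mount:web_modules',
  type: 'mount',
  match: ['web_modules'],
  cmd: `mount $WEB_MODULES --to /web_modules`,
  args: {
    // Always the dev cache now; the build command installs production
    // dependencies directly into the build output instead.
    fromDisk: DEV_DEPENDENCIES_DIR,
    toUrl: '/web_modules',
  },
};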
1 change: 0 additions & 1 deletion src/util.ts
@@ -19,7 +19,6 @@ export const BUILD_CACHE = path.join(GLOBAL_CACHE_DIR, 'build-cache-1.4');

export const PROJECT_CACHE_DIR = projectCacheDir({name: 'snowpack'});
export const DEV_DEPENDENCIES_DIR = path.join(PROJECT_CACHE_DIR, 'dev');
export const BUILD_DEPENDENCIES_DIR = path.join(PROJECT_CACHE_DIR, 'build');
const LOCKFILE_HASH_FILE = '.hash';

export const HAS_CDN_HASH_REGEX = /\-[a-zA-Z0-9]{16,}/;
