refactor filer types (#55)
ryanatkn authored Sep 26, 2020
1 parent a7b95ea commit 68bca52
Showing 3 changed files with 90 additions and 58 deletions.
48 changes: 28 additions & 20 deletions src/compile/compiler.ts
@@ -38,23 +38,22 @@ export interface Compiler {
 }
 
 export interface CompileResult {
-	// TODO might need to be a union with a type, like `extension: '.svelte'` with additional properties.
-	// Svelte compilation properties include `ast`, `warnings`, `vars`, and `stats`
-	files: CompiledFile[];
+	compilations: Compilation[];
 }
 
-// TODO name? so close to `CompileFile` - maybe that should be renamed `FileCompiler`?
-export type CompiledFile = CompiledTextFile | CompiledBinaryFile;
-export interface BaseCompiledFile {
+export type Compilation = TextCompilation | BinaryCompilation;
+export interface BaseCompilation {
 	id: string;
 	extension: string;
 }
-export interface CompiledTextFile extends BaseCompiledFile {
+// TODO might need to be a union with a type, like `extension: '.svelte'` with additional properties.
+// Svelte compilation properties include `ast`, `warnings`, `vars`, and `stats`
+export interface TextCompilation extends BaseCompilation {
 	encoding: 'utf8';
 	contents: string;
-	sourceMapOf?: string; // TODO for source maps? hmm. maybe we want a union with an `isSourceMap` boolean flag?
+	sourceMapOf: string | null; // TODO for source maps? hmm. maybe we want a union with an `isSourceMap` boolean flag?
 }
-export interface CompiledBinaryFile extends BaseCompiledFile {
+export interface BinaryCompilation extends BaseCompilation {
 	encoding: null;
 	contents: Buffer;
 }
@@ -115,24 +114,25 @@ export const createCompiler = (opts: InitialOptions): Compiler => {
 				const output = await swc.transform(contents as string, finalSwcOptions);
 				const buildId = toBuildId(id);
 				const sourceMapBuildId = buildId + SOURCE_MAP_EXTENSION;
-				const files: CompiledFile[] = [
+				const compilations: Compilation[] = [
 					{
 						id: buildId,
 						extension: JS_EXTENSION,
 						encoding: 'utf8',
 						contents: output.map ? addSourceMapFooter(output.code, sourceMapBuildId) : output.code,
+						sourceMapOf: null,
 					},
 				];
 				if (output.map) {
-					files.push({
+					compilations.push({
 						id: sourceMapBuildId,
 						extension: SOURCE_MAP_EXTENSION,
 						encoding: 'utf8',
 						contents: output.map,
 						sourceMapOf: buildId,
 					});
 				}
-				return {files};
+				return {compilations};
 			}
 			case SVELTE_EXTENSION: {
 				let preprocessedCode: string;
@@ -166,11 +166,17 @@ export const createCompiler = (opts: InitialOptions): Compiler => {
 				const jsBuildId = toBuildId(id);
 				const cssBuildId = replaceExtension(jsBuildId, CSS_EXTENSION);
 
-				const files: CompiledFile[] = [
-					{id: jsBuildId, extension: JS_EXTENSION, encoding: 'utf8', contents: js.code},
+				const compilations: Compilation[] = [
+					{
+						id: jsBuildId,
+						extension: JS_EXTENSION,
+						encoding: 'utf8',
+						contents: js.code,
+						sourceMapOf: null,
+					},
 				];
 				if (sourceMap && js.map) {
-					files.push({
+					compilations.push({
 						id: jsBuildId + SOURCE_MAP_EXTENSION,
 						extension: SOURCE_MAP_EXTENSION,
 						encoding: 'utf8',
@@ -179,14 +185,15 @@
 					});
 				}
 				if (css.code) {
-					files.push({
+					compilations.push({
 						id: cssBuildId,
 						extension: CSS_EXTENSION,
 						encoding: 'utf8',
 						contents: css.code,
+						sourceMapOf: null,
 					});
 					if (sourceMap && css.map) {
-						files.push({
+						compilations.push({
 							id: cssBuildId + SOURCE_MAP_EXTENSION,
 							extension: SOURCE_MAP_EXTENSION,
 							encoding: 'utf8',
@@ -195,13 +202,13 @@
 						});
 					}
 				}
-				return {files};
+				return {compilations};
 			}
 			default: {
 				const buildId = toBuildId(id);
 				const extension = extname(id);
 				const encoding = inferEncoding(extension);
-				let file: CompiledFile;
+				let file: Compilation;
 				// TODO simplify this code if we add no additional properties - we may add stuff for source maps, though
 				switch (encoding) {
 					case 'utf8':
@@ -210,6 +217,7 @@
 							extension,
 							encoding,
 							contents: contents as string,
+							sourceMapOf: null,
 						};
 						break;
 					case null:
@@ -223,7 +231,7 @@
 					default:
 						throw new UnreachableError(encoding);
 				}
-				return {files: [file]};
+				return {compilations: [file]};
 			}
 		}
 	};
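Aside, for illustration only (not part of the commit): the new `Compilation` union is discriminated by its `encoding` field, so downstream code can narrow it with a switch, as the Filer does below. A minimal sketch under assumed imports from src/compile/compiler.ts; `compilationSize` is a hypothetical helper, not code from the repository.

import type {Compilation} from './compile/compiler.js'; // import path assumed

// Narrowing on the `encoding` discriminant: 'utf8' selects TextCompilation,
// null selects BinaryCompilation.
const compilationSize = (compilation: Compilation): number => {
	switch (compilation.encoding) {
		case 'utf8':
			return compilation.contents.length; // string contents; `sourceMapOf` is also available here
		case null:
			return compilation.contents.byteLength; // Buffer contents
		default: {
			const unreachable: never = compilation; // exhaustiveness check
			throw new Error(`Unknown encoding: ${unreachable}`);
		}
	}
};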
10 changes: 2 additions & 8 deletions src/devServer/devServer.ts
@@ -14,13 +14,7 @@ import {cyan, yellow, gray} from '../colors/terminal.js';
 import {Logger, SystemLogger} from '../utils/log.js';
 import {stripAfter} from '../utils/string.js';
 import {omitUndefined} from '../utils/object.js';
-import {
-	Filer,
-	CompiledSourceFile,
-	getFileMimeType,
-	getFileBuffer,
-	getFileStats,
-} from '../fs/Filer.js';
+import {Filer, CompiledFile, getFileMimeType, getFileBuffer, getFileStats} from '../fs/Filer.js';
 
 export interface DevServer {
 	server: Server;
@@ -119,7 +113,7 @@ const send404 = (req: IncomingMessage, res: ServerResponse, path: string) => {
 	res.end(`404 not found: ${req.url} -> ${path}`);
 };
 
-const send200 = async (_req: IncomingMessage, res: ServerResponse, file: CompiledSourceFile) => {
+const send200 = async (_req: IncomingMessage, res: ServerResponse, file: CompiledFile) => {
 	const stats = await getFileStats(file);
 	const mimeType = getFileMimeType(file);
 	const headers: OutgoingHttpHeaders = {
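Aside, for illustration only (not part of the commit): with the import change above, the dev server handles the Filer's `CompiledFile` directly, looking files up via `getCompiledFile` and replying with the `send404`/`send200` helpers whose signatures are shown. A hypothetical sketch of that wiring, with the helpers passed in as parameters since they are module-private; the import path follows the one in the diff.

import type {IncomingMessage, ServerResponse} from 'http';
import {Filer, CompiledFile} from '../fs/Filer.js'; // path as imported by devServer.ts

// Hypothetical request handler: resolve a compiled file and reply 200 or 404.
const createRequestHandler = (
	filer: Filer,
	send404: (req: IncomingMessage, res: ServerResponse, path: string) => void,
	send200: (req: IncomingMessage, res: ServerResponse, file: CompiledFile) => Promise<void>,
) => async (req: IncomingMessage, res: ServerResponse): Promise<void> => {
	const path = req.url || '/'; // real code would map the URL to a compiled file id
	const file = filer.getCompiledFile(path);
	if (file === null) {
		send404(req, res, path);
		return;
	}
	await send200(req, res, file);
};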
90 changes: 60 additions & 30 deletions src/fs/Filer.ts
@@ -19,15 +19,15 @@ import {UnreachableError} from '../utils/error.js';
 import {Logger, SystemLogger} from '../utils/log.js';
 import {magenta, red} from '../colors/terminal.js';
 import {printError, printPath} from '../utils/print.js';
-import {Compiler, CompiledTextFile, CompiledBinaryFile} from '../compile/compiler.js';
+import {Compiler, TextCompilation, BinaryCompilation} from '../compile/compiler.js';
 import {getMimeTypeByExtension} from './mime.js';
 import {Encoding, inferEncoding} from './encoding.js';
 
 export type SourceFile = SourceTextFile | SourceBinaryFile;
 interface BaseSourceFile {
 	id: string;
 	extension: string;
-	compiledFiles: CompiledSourceFile[];
+	compiledFiles: CompiledFile[];
 }
 export interface SourceTextFile extends BaseSourceFile {
 	encoding: 'utf8';
@@ -40,16 +40,27 @@ export interface SourceBinaryFile extends BaseSourceFile {
 	buffer: Buffer;
 }
 
-export type CompiledSourceFile = CompiledSourceTextFile | CompiledSourceBinaryFile;
-export interface CompiledSourceTextFile extends CompiledTextFile {
+export type CompiledFile = CompiledTextFile | CompiledBinaryFile;
+export interface BaseCompiledFile {
+	id: string;
+	extension: string;
 	stats: Stats | undefined; // `undefined` for lazy loading
-	buffer: Buffer | undefined; // `undefined` for lazy loading
 	mimeType: string | null | undefined; // `null` means unknown, `undefined` for lazy loading
+	buffer: Buffer | undefined; // `undefined` for lazy loading
 }
-export interface CompiledSourceBinaryFile extends CompiledBinaryFile {
-	stats: Stats | undefined; // `undefined` for lazy loading
+export interface CompiledTextFile extends BaseCompiledFile {
+	// sourceFile: SourceTextFile; // TODO add this reference?
+	compilation: TextCompilation;
+	encoding: 'utf8';
+	contents: string;
+	sourceMapOf: string | null; // TODO for source maps? hmm. maybe we want a union with an `isSourceMap` boolean flag?
+}
+export interface CompiledBinaryFile extends BaseCompiledFile {
+	// sourceFile: SourceBinaryFile; // TODO add this reference?
+	compilation: BinaryCompilation;
+	encoding: null;
+	contents: Buffer;
 	buffer: Buffer;
-	mimeType: string | null | undefined; // `null` means unknown, `undefined` for lazy loading
 }
 
 interface Options {
@@ -85,7 +96,7 @@ export class Filer {
 	private readonly include: (id: string) => boolean;
 
 	private readonly sourceFiles: Map<string, SourceFile> = new Map();
-	private readonly compiledFiles: Map<string, CompiledSourceFile> = new Map();
+	private readonly compiledFiles: Map<string, CompiledFile> = new Map();
 
 	private initStatus: AsyncStatus = 'initial';
 
@@ -108,7 +119,7 @@
 
 	// TODO support lazy loading for some files - how? via a regexp?
 	// this will probably need to be async when that's added
-	getCompiledFile(id: string): CompiledSourceFile | null {
+	getCompiledFile(id: string): CompiledFile | null {
 		return this.compiledFiles.get(id) || null;
 	}
 
@@ -310,18 +321,37 @@
 
 		// Update the cache.
 		const oldFiles = sourceFile.compiledFiles;
-		// TODO maybe merge the interfaces for the `CompiledFile` and `CompiledSourceFile`,
-		// won't need to do this inefficient copying or change the shape of objects
-		sourceFile.compiledFiles = result.files.map((file) => {
-			switch (file.encoding) {
-				case 'utf8':
-					return {...file, stats: undefined, mimeType: undefined, buffer: undefined};
-				case null:
-					return {...file, stats: undefined, mimeType: undefined, buffer: file.contents};
-				default:
-					throw new UnreachableError(file);
-			}
-		});
+		sourceFile.compiledFiles = result.compilations.map(
+			(compilation): CompiledFile => {
+				switch (compilation.encoding) {
+					case 'utf8':
+						return {
+							id: compilation.id,
+							extension: compilation.extension,
+							encoding: compilation.encoding,
+							contents: compilation.contents,
+							sourceMapOf: compilation.sourceMapOf,
+							compilation,
+							stats: undefined,
+							mimeType: undefined, // TODO copy from old file?
+							buffer: undefined,
+						};
+					case null:
+						return {
+							id: compilation.id,
+							extension: compilation.extension,
+							encoding: compilation.encoding,
+							contents: compilation.contents,
+							compilation,
+							stats: undefined,
+							mimeType: undefined, // TODO copy from old file?
+							buffer: compilation.contents,
+						};
+					default:
+						throw new UnreachableError(compilation);
+				}
+			},
+		);
 
 		// Write to disk.
 		await syncFilesToDisk(sourceFile.compiledFiles, oldFiles, this.log);
@@ -358,8 +388,8 @@ const sourceMapsAreBuilt = async (sourceFile: SourceFile): Promise<boolean> => {
 // Given `newFiles` and `oldFiles`, updates everything on disk,
 // deleting files that no longer exist, writing new ones, and updating existing ones.
 const syncFilesToDisk = async (
-	newFiles: CompiledSourceFile[],
-	oldFiles: CompiledSourceFile[],
+	newFiles: CompiledFile[],
+	oldFiles: CompiledFile[],
 	log: Logger,
 ): Promise<void> => {
 	// This uses `Array#find` because the arrays are expected to be small,
@@ -400,9 +430,9 @@ const syncFilesToDisk = async (
 // Given `newFiles` and `oldFiles`, updates the memory cache,
 // deleting files that no longer exist and setting the new ones, replacing any old ones.
 const syncFilesToMemoryCache = async (
-	compiledFiles: Map<string, CompiledSourceFile>,
-	newFiles: CompiledSourceFile[],
-	oldFiles: CompiledSourceFile[],
+	compiledFiles: Map<string, CompiledFile>,
+	newFiles: CompiledFile[],
+	oldFiles: CompiledFile[],
 	_log: Logger,
 ): Promise<void> => {
 	// This uses `Array#find` because the arrays are expected to be small,
@@ -420,16 +450,16 @@
 	}
 };
 
-export const getFileMimeType = (file: CompiledSourceFile): string | null =>
+export const getFileMimeType = (file: CompiledFile): string | null =>
 	file.mimeType !== undefined
 		? file.mimeType
 		: (file.mimeType = getMimeTypeByExtension(file.extension.substring(1)));
 
-export const getFileBuffer = (file: CompiledSourceFile): Buffer =>
+export const getFileBuffer = (file: CompiledFile): Buffer =>
 	file.buffer !== undefined ? file.buffer : (file.buffer = Buffer.from(file.contents));
 
 // Stats are currently lazily loaded. Should they be?
-export const getFileStats = (file: CompiledSourceFile): Stats | Promise<Stats> =>
+export const getFileStats = (file: CompiledFile): Stats | Promise<Stats> =>
 	file.stats !== undefined
 		? file.stats
 		: stat(file.id).then((stats) => {
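Aside, for illustration only (not part of the commit): the exported helpers above lazily compute and then cache each `CompiledFile`'s mime type, buffer, and stats on first access. A minimal usage sketch; `describeCompiledFile` is hypothetical and the import path is assumed.

import {Filer, getFileMimeType, getFileBuffer, getFileStats} from './fs/Filer.js'; // path assumed

// Hypothetical: summarize a compiled file, exercising each lazy helper once.
const describeCompiledFile = async (filer: Filer, id: string): Promise<string | null> => {
	const file = filer.getCompiledFile(id);
	if (file === null) return null;
	const mimeType = getFileMimeType(file); // inferred from the extension on first call, then cached
	const buffer = getFileBuffer(file); // text contents are converted to a Buffer once, then reused
	const stats = await getFileStats(file); // `fs.stat` runs only on the first call
	return `${id}: ${mimeType ?? 'unknown type'}, ${buffer.length} bytes, modified ${stats.mtime.toISOString()}`;
};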
