build: add a rule to extract information about tokens
Adds the new `extract_tokens` build rule that looks through all the passed-in themes and extracts information about their tokens into a JSON file.

(cherry picked from commit 7ad38ee)
crisbeto committed Oct 3, 2024
1 parent f8b5359 commit c7e27ac
Showing 5 changed files with 258 additions and 0 deletions.
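
The rule emits one JSON file per package, whose shape follows the object written by `extract-tokens.ts` below. The token names in this sketch are purely illustrative:

```json
{
  "totalTokens": 2,
  "base": [],
  "color": [
    {"name": "--mdc-fab-container-color", "derivedFrom": "--sys-primary-container"}
  ],
  "typography": [
    {"name": "--mat-card-title-text-size"}
  ],
  "density": []
}
```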
2 changes: 2 additions & 0 deletions tools/defaults.bzl
@@ -17,6 +17,7 @@ load("@npm//tsec:index.bzl", _tsec_test = "tsec_test")
load("//:packages.bzl", "NO_STAMP_NPM_PACKAGE_SUBSTITUTIONS", "NPM_PACKAGE_SUBSTITUTIONS")
load("//:pkg-externals.bzl", "PKG_EXTERNALS")
load("//tools/markdown-to-html:index.bzl", _markdown_to_html = "markdown_to_html")
load("//tools/extract-tokens:index.bzl", _extract_tokens = "extract_tokens")
load("//tools/angular:index.bzl", "LINKER_PROCESSED_FW_PACKAGES")

_DEFAULT_TSCONFIG_BUILD = "//src:bazel-tsconfig-build.json"
@@ -30,6 +31,7 @@ npmPackageSubstitutions = select({
# Re-exports to simplify build file load statements
markdown_to_html = _markdown_to_html
integration_test = _integration_test
extract_tokens = _extract_tokens
esbuild = _esbuild
esbuild_config = _esbuild_config
http_server = _http_server
26 changes: 26 additions & 0 deletions tools/extract-tokens/BUILD.bazel
@@ -0,0 +1,26 @@
load("@build_bazel_rules_nodejs//:index.bzl", "nodejs_binary")
load("//tools:defaults.bzl", "ts_library")

package(default_visibility = ["//visibility:public"])

ts_library(
    name = "extract_tokens_lib",
    srcs = glob(["**/*.ts"]),
    # TODO(ESM): remove this once the Bazel NodeJS rules can handle ESM with `nodejs_binary`.
    devmode_module = "commonjs",
    tsconfig = ":tsconfig.json",
    deps = [
        "@npm//@types/node",
        "@npm//sass",
    ],
)

nodejs_binary(
    name = "extract-tokens",
    data = [
        ":extract_tokens_lib",
        "@npm//sass",
    ],
    entry_point = ":extract-tokens.ts",
    templated_args = ["--bazel_patch_module_resolver"],
)
166 changes: 166 additions & 0 deletions tools/extract-tokens/extract-tokens.ts
@@ -0,0 +1,166 @@
import {writeFileSync} from 'fs';
import {relative, basename, join} from 'path';
import {compileString} from 'sass';

/** Types of tokens. */
type TokenType = 'base' | 'color' | 'typography' | 'density';

/** Extracted data for a single token. */
interface Token {
  /** Name of the token. */
  name: string;
  /** System token that it was derived from. */
  derivedFrom?: string;
}

// Script that extracts the tokens from a specific Bazel target.
if (require.main === module) {
  const [packagePath, outputPath, ...inputFiles] = process.argv.slice(2);
  const themeFiles = inputFiles.filter(
    file =>
      // Keep only the files within the package,
      // since the input list also includes its dependencies.
      file.startsWith(packagePath) &&
      // Assumption: all theme files start with an underscore,
      // since they're partials, and end with `-theme`.
      basename(file).startsWith('_') &&
      file.endsWith('-theme.scss'),
  );

  if (themeFiles.length === 0) {
    throw new Error(`Could not find theme files in ${packagePath}`);
  }

  const theme = compileTheme(packagePath, themeFiles);
  const base = parseTokens('base', theme);
  const color = parseTokens('color', theme);
  const typography = parseTokens('typography', theme);
  const density = parseTokens('density', theme);

  writeFileSync(
    outputPath,
    JSON.stringify({
      totalTokens: base.length + color.length + typography.length + density.length,
      base,
      color,
      typography,
      density,
    }),
  );
}

/**
 * Compiles a theme from which tokens can be extracted.
 * @param packagePath Path of the package being processed.
 * @param themeFiles File paths of the theme files within the package.
 */
function compileTheme(packagePath: string, themeFiles: string[]): string {
  const imports: string[] = [];
  const base: string[] = [];
  const color: string[] = [];
  const typography: string[] = [];
  const density: string[] = [];

  for (let i = 0; i < themeFiles.length; i++) {
    const localName = `ctx${i}`;
    imports.push(`@use './${relative(packagePath, themeFiles[i])}' as ${localName};`);
    base.push(`@include ${localName}.base($theme);`);
    color.push(`@include ${localName}.color($theme);`);
    typography.push(`@include ${localName}.typography($theme);`);
    density.push(`@include ${localName}.density($theme);`);
  }

  // Note: constructing the theme objects is expensive (takes ~2s locally) so we want to reduce
  // the number of themes we need to compile. We minimize the impact by outputting all the sections
  // into a single theme file and separating them with markers. Later on in the script we can
  // use the markers to group the tokens.
  const theme = `
    @use '../core/theming/definition';
    @use '../core/theming/palettes';
    ${imports.join('\n')}
    $theme: definition.define-theme((
      color: (
        theme-type: light,
        primary: palettes.$azure-palette,
        tertiary: palettes.$blue-palette,
        use-system-variables: true,
      ),
      typography: (use-system-variables: true),
      density: (scale: 0),
    ));
    ${getMarker('base', 'start')} :root {${base.join('\n')}}${getMarker('base', 'end')}
    ${getMarker('color', 'start')} :root {${color.join('\n')}}${getMarker('color', 'end')}
    ${getMarker('typography', 'start')} :root {${typography.join('\n')}}${getMarker('typography', 'end')}
    ${getMarker('density', 'start')} :root {${density.join('\n')}}${getMarker('density', 'end')}
  `;

  // Note: this is using the synchronous `compileString`, even though the Sass docs claim the async
  // version is faster. From local testing the synchronous version was faster (~2s versus ~5s).
  return compileString(theme, {
    loadPaths: [join(process.cwd(), packagePath)],
    sourceMap: false,
  }).css;
}

/**
 * Parses the tokens of a specific type from a compiled theme.
 * @param type Type of tokens to look for.
 * @param theme Theme from which to parse the tokens.
 */
function parseTokens(type: TokenType, theme: string): Token[] {
  const startMarker = getMarker(type, 'start');
  const endMarker = getMarker(type, 'end');
  const sectionText = textBetween(theme, startMarker, endMarker);

  if (sectionText === null) {
    throw new Error(`Could not parse tokens for ${type}`);
  }

  return (
    (sectionText.match(/\s--.+\s*:.+;/g) || [])
      .map(rawToken => {
        const [name, value] = rawToken.split(':');
        const token: Token = {name: name.trim()};
        // Assumption: tokens whose value contains a system variable
        // reference are derived from that system variable.
        const derivedFrom = textBetween(value, 'var(', ')');
        if (derivedFrom) {
          token.derivedFrom = derivedFrom;
        }
        return token;
      })
      // Sort the tokens by name so they look better in the final output.
      .sort((a, b) => a.name.localeCompare(b.name))
  );
}

/**
 * Creates a marker that can be used to differentiate sections in a theme file.
 * @param type Type of the tokens in the section.
 * @param location Whether this is a start or end marker.
 */
function getMarker(type: TokenType, location: 'start' | 'end'): string {
  return `/*! ${type} ${location} */`;
}

/**
 * Gets the substring between two strings.
 * @param text String from which to extract the substring.
 * @param start Start marker of the substring.
 * @param end End marker of the substring.
 */
function textBetween(text: string, start: string, end: string): string | null {
  const startIndex = text.indexOf(start);
  if (startIndex === -1) {
    return null;
  }

  const endIndex = text.indexOf(end, startIndex);
  if (endIndex === -1) {
    return null;
  }

  return text.slice(startIndex + start.length, endIndex);
}
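
To make the parsing concrete, here is a small sketch (not part of the commit) that runs a hand-written compiled theme through `parseTokens` from the script above; the token names are made up:

```ts
// Illustrative input: a minimal "compiled theme" containing one color section,
// delimited with the same markers that getMarker() produces.
const sampleTheme = `
/*! color start */ :root {
  --mat-example-container-color: var(--sys-surface);
  --mat-example-label-size: 14px;
} /*! color end */
`;

// Each `--name: value;` declaration between the markers becomes a Token, and
// values wrapped in var() record the system token they were derived from:
// [
//   {name: '--mat-example-container-color', derivedFrom: '--sys-surface'},
//   {name: '--mat-example-label-size'},
// ]
console.log(parseTokens('color', sampleTheme));
```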
50 changes: 50 additions & 0 deletions tools/extract-tokens/index.bzl
@@ -0,0 +1,50 @@
"""
Implementation of the "extract_tokens" rule.
"""

def _extract_tokens(ctx):
    input_files = ctx.files.srcs
    args = ctx.actions.args()

    # Do nothing if there are no input files. Bazel will throw if we schedule an action
    # that returns no outputs.
    if not input_files:
        return None

    # Derive the name of the output file from the package.
    output_file_name = ctx.actions.declare_file(ctx.label.package.split("/")[-1] + ".json")
    expected_outputs = [output_file_name]

    # Pass the necessary information like the package name and files to the script.
    args.add(ctx.label.package, output_file_name)

    for input_file in input_files:
        args.add(input_file.path)

    # Run the token extraction executable. Note that we specify the outputs because Bazel
    # can throw an error if the script didn't generate the required outputs.
    ctx.actions.run(
        inputs = input_files,
        executable = ctx.executable._extract_tokens,
        outputs = expected_outputs,
        arguments = [args],
        progress_message = "ExtractTokens",
    )

    return DefaultInfo(files = depset(expected_outputs))

"""
Rule definition for the "extract_tokens" rule that can extract
information about CSS tokens from a set of source files.
"""
extract_tokens = rule(
    implementation = _extract_tokens,
    attrs = {
        "srcs": attr.label_list(),
        "_extract_tokens": attr.label(
            default = Label("//tools/extract-tokens"),
            executable = True,
            cfg = "exec",
        ),
    },
)
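
For reference, a consumer package would load the re-export added to `tools/defaults.bzl` and point the rule at a target that provides its theme files. A minimal sketch, with a hypothetical `srcs` target:

```starlark
load("//tools:defaults.bzl", "extract_tokens")

# Hypothetical usage: writes the extracted token data to `<package-name>.json`.
# ":button_scss_lib" stands in for a target that provides the *-theme.scss files.
extract_tokens(
    name = "tokens",
    srcs = [":button_scss_lib"],
)
```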
14 changes: 14 additions & 0 deletions tools/extract-tokens/tsconfig.json
@@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "lib": ["es2020"],
    "module": "commonjs",
    "target": "es2020",
    "esModuleInterop": true,
    "sourceMap": true,
    "strict": true,
    "types": ["node"]
  },
  "bazelOptions": {
    "suppressTsconfigOverrideWarnings": true
  }
}
