-
-
Notifications
You must be signed in to change notification settings - Fork 2.6k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
feat: add Content Layer loader (#11334)
* wip * wip * wip * Update demo * Add meta * wip * Add file loader * Add schema validation * Remove log * Changeset * Format * Lockfile * Fix type * Handle loading for data store JSON * Use rollup util to import JSON * Fix types * Format * Add tests * Changes from review
- Loading branch information
Showing
28 changed files
with
1,252 additions
and
79 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,155 @@ | ||
import { promises as fs, type PathLike, existsSync } from 'fs'; | ||
export class DataStore { | ||
#collections = new Map<string, Map<string, any>>(); | ||
constructor() { | ||
this.#collections = new Map(); | ||
} | ||
get(collectionName: string, key: string) { | ||
return this.#collections.get(collectionName)?.get(String(key)); | ||
} | ||
entries(collectionName: string): Array<[id: string, any]> { | ||
const collection = this.#collections.get(collectionName) ?? new Map(); | ||
return [...collection.entries()]; | ||
} | ||
values(collectionName: string): Array<unknown> { | ||
const collection = this.#collections.get(collectionName) ?? new Map(); | ||
return [...collection.values()]; | ||
} | ||
keys(collectionName: string): Array<string> { | ||
const collection = this.#collections.get(collectionName) ?? new Map(); | ||
return [...collection.keys()]; | ||
} | ||
set(collectionName: string, key: string, value: unknown) { | ||
const collection = this.#collections.get(collectionName) ?? new Map(); | ||
collection.set(String(key), value); | ||
this.#collections.set(collectionName, collection); | ||
} | ||
delete(collectionName: string, key: string) { | ||
const collection = this.#collections.get(collectionName); | ||
if (collection) { | ||
collection.delete(String(key)); | ||
} | ||
} | ||
clear(collectionName: string) { | ||
this.#collections.delete(collectionName); | ||
} | ||
|
||
has(collectionName: string, key: string) { | ||
const collection = this.#collections.get(collectionName); | ||
if (collection) { | ||
return collection.has(String(key)); | ||
} | ||
return false; | ||
} | ||
|
||
hasCollection(collectionName: string) { | ||
return this.#collections.has(collectionName); | ||
} | ||
|
||
collections() { | ||
return this.#collections; | ||
} | ||
|
||
scopedStore(collectionName: string): ScopedDataStore { | ||
return { | ||
get: (key: string) => this.get(collectionName, key), | ||
entries: () => this.entries(collectionName), | ||
values: () => this.values(collectionName), | ||
keys: () => this.keys(collectionName), | ||
set: (key: string, value: any) => this.set(collectionName, key, value), | ||
delete: (key: string) => this.delete(collectionName, key), | ||
clear: () => this.clear(collectionName), | ||
has: (key: string) => this.has(collectionName, key), | ||
}; | ||
} | ||
|
||
metaStore(collectionName: string): MetaStore { | ||
return this.scopedStore(`meta:${collectionName}`) as MetaStore; | ||
} | ||
|
||
toString() { | ||
return JSON.stringify( | ||
Array.from(this.#collections.entries()).map(([collectionName, collection]) => { | ||
return [collectionName, Array.from(collection.entries())]; | ||
}) | ||
); | ||
} | ||
|
||
async writeToDisk(filePath: PathLike) { | ||
try { | ||
await fs.writeFile(filePath, this.toString()); | ||
} catch { | ||
throw new Error(`Failed to save data store to disk`); | ||
} | ||
} | ||
|
||
static async fromDisk(filePath: PathLike) { | ||
if (!existsSync(filePath)) { | ||
return new DataStore(); | ||
} | ||
const str = await fs.readFile(filePath, 'utf-8'); | ||
return DataStore.fromString(str); | ||
} | ||
|
||
static fromString(str: string) { | ||
const entries = JSON.parse(str); | ||
return DataStore.fromJSON(entries); | ||
} | ||
|
||
static async fromModule() { | ||
try { | ||
// @ts-expect-error | ||
const data = await import('astro:data-layer-content'); | ||
return DataStore.fromJSON(data.default); | ||
} catch {} | ||
return new DataStore(); | ||
} | ||
|
||
static fromJSON(entries: Array<[string, Array<[string, any]>]>) { | ||
const collections = new Map<string, Map<string, any>>(); | ||
for (const [collectionName, collection] of entries) { | ||
collections.set(collectionName, new Map(collection)); | ||
} | ||
const store = new DataStore(); | ||
store.#collections = collections; | ||
return store; | ||
} | ||
} | ||
|
||
/**
 * A view of the data store scoped to a single collection. All operations
 * read and write entries of that collection only; keys are entry IDs.
 */
export interface ScopedDataStore {
	/** Returns the entry stored under `key`, or `undefined` if absent. */
	get: (key: string) => unknown;
	/** Returns every `[id, entry]` pair in the collection. */
	entries: () => Array<[id: string, unknown]>;
	/** Stores an entry under `key`, overwriting any existing value. */
	set: (key: string, value: unknown) => void;
	/** Returns every entry value in the collection. */
	values: () => Array<unknown>;
	/** Returns every entry ID in the collection. */
	keys: () => Array<string>;
	/** Removes the entry stored under `key`, if present. */
	delete: (key: string) => void;
	/** Removes every entry in the collection. */
	clear: () => void;
	/** Returns `true` if an entry exists under `key`. */
	has: (key: string) => boolean;
}
|
||
/**
 * A key-value store for metadata strings. Useful for storing things like sync tokens.
 */
export interface MetaStore {
	/** Returns the metadata string stored under `key`, or `undefined` if absent. */
	get: (key: string) => string | undefined;
	/** Stores a metadata string under `key`, overwriting any existing value. */
	set: (key: string, value: string) => void;
	/** Returns `true` if a value exists under `key`. */
	has: (key: string) => boolean;
}
|
||
function dataStoreSingleton() { | ||
let instance: Promise<DataStore> | DataStore | undefined = undefined; | ||
return { | ||
get: async () => { | ||
if (!instance) { | ||
instance = DataStore.fromModule(); | ||
} | ||
return instance; | ||
}, | ||
set: (store: DataStore) => { | ||
instance = store; | ||
}, | ||
}; | ||
} | ||
|
||
export const globalDataStore = dataStoreSingleton(); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,62 @@ | ||
import { fileURLToPath } from 'url'; | ||
import type { Loader } from './loaders.js'; | ||
import { promises as fs, existsSync } from 'fs'; | ||
|
||
/** | ||
* Loads entries from a JSON file. The file must contain an array of objects that contain unique `id` fields, or an object with string keys. | ||
* @todo Add support for other file types, such as YAML, CSV etc. | ||
* @param fileName The path to the JSON file to load, relative to the content directory. | ||
*/ | ||
export function file(fileName: string): Loader { | ||
if (fileName.includes('*')) { | ||
// TODO: AstroError | ||
throw new Error('Glob patterns are not supported in `file` loader. Use `glob` loader instead.'); | ||
} | ||
return { | ||
name: 'file-loader', | ||
load: async ({ store, logger, settings, parseData }) => { | ||
const contentDir = new URL('./content/', settings.config.srcDir); | ||
|
||
const url = new URL(fileName, contentDir); | ||
if (!existsSync(url)) { | ||
logger.error(`File not found: ${fileName}`); | ||
return; | ||
} | ||
|
||
let json: Array<Record<string, unknown>>; | ||
|
||
try { | ||
const data = await fs.readFile(url, 'utf-8'); | ||
json = JSON.parse(data); | ||
} catch (error: any) { | ||
logger.error(`Error reading data from ${fileName}`); | ||
logger.debug(error.message); | ||
return; | ||
} | ||
|
||
const filePath = fileURLToPath(url); | ||
|
||
if (Array.isArray(json)) { | ||
if (json.length === 0) { | ||
logger.warn(`No items found in ${fileName}`); | ||
} | ||
for (const rawItem of json) { | ||
const id = (rawItem.id ?? rawItem.slug)?.toString(); | ||
if (!id) { | ||
logger.error(`Item in ${fileName} is missing an id or slug field.`); | ||
continue; | ||
} | ||
const item = await parseData({ id, data: rawItem, filePath }); | ||
store.set(id, item); | ||
} | ||
} else if (typeof json === 'object') { | ||
for (const [id, rawItem] of Object.entries<Record<string, unknown>>(json)) { | ||
const item = await parseData({ id, data: rawItem, filePath }); | ||
store.set(id, item); | ||
} | ||
} else { | ||
logger.error(`Invalid data in ${fileName}. Must be an array or object.`); | ||
} | ||
}, | ||
}; | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,123 @@ | ||
import type { ZodSchema } from 'zod'; | ||
import type { AstroSettings } from '../@types/astro.js'; | ||
import type { AstroIntegrationLogger, Logger } from '../core/logger/core.js'; | ||
import { DataStore, globalDataStore, type MetaStore, type ScopedDataStore } from './data-store.js'; | ||
import { getEntryData, globalContentConfigObserver } from './utils.js'; | ||
import { promises as fs, existsSync } from 'fs'; | ||
import { DATA_STORE_FILE } from './consts.js'; | ||
|
||
/**
 * Options accepted by `LoaderContext.parseData`, describing one raw entry
 * to be validated against the collection schema.
 */
export interface ParseDataOptions {
	/** The ID of the entry. Unique per collection */
	id: string;
	/** The raw, unvalidated data of the entry */
	data: Record<string, unknown>;
	/** An optional file path, where the entry represents a local file. */
	filePath?: string;
}
|
||
/**
 * The context object passed to a {@link Loader}'s `load()` method,
 * providing everything a loader needs to fetch and persist entries.
 */
export interface LoaderContext {
	/** The unique name of the collection */
	collection: string;
	/** A database abstraction to store the actual data */
	store: ScopedDataStore;
	/** A simple KV store, designed for things like sync tokens */
	meta: MetaStore;
	/** Logger forked for the content integration. */
	logger: AstroIntegrationLogger;

	/** The resolved Astro settings, including the user's project config. */
	settings: AstroSettings;

	/**
	 * Validates and parses the data according to the collection schema.
	 * NOTE(review): call sites `await` the result even though the declared
	 * return type is `T`, not `Promise<T>` — confirm whether this should be
	 * declared as returning a promise.
	 */
	parseData<T extends Record<string, unknown> = Record<string, unknown>>(
		props: ParseDataOptions
	): T;
}
|
||
/**
 * A content-layer loader: fetches entries from some source (file, API, etc.)
 * and writes them into the data store via the provided {@link LoaderContext}.
 */
export interface Loader<S extends ZodSchema = ZodSchema> {
	/** Unique name of the loader, e.g. the npm package name */
	name: string;
	/** Do the actual loading of the data */
	load: (context: LoaderContext) => Promise<void>;
	/** Optionally, define the schema of the data. Will be overridden by user-defined schema */
	schema?: S | Promise<S> | (() => S | Promise<S>);
	// NOTE(review): presumably renders an entry to HTML/content output; the
	// contract is untyped here — confirm the intended signature.
	render?: (entry: any) => any;
}
|
||
/** | ||
* Run the `load()` method of each collection's loader, which will load the data and save it in the data store. | ||
* The loader itself is responsible for deciding whether this will clear and reload the full collection, or | ||
* perform an incremental update. After the data is loaded, the data store is written to disk. | ||
*/ | ||
export async function syncContentLayer({ | ||
settings, | ||
logger: globalLogger, | ||
store, | ||
}: { settings: AstroSettings; logger: Logger; store?: DataStore }) { | ||
const logger = globalLogger.forkIntegrationLogger('content'); | ||
if (!store) { | ||
store = await DataStore.fromDisk(new URL(DATA_STORE_FILE, settings.config.cacheDir)); | ||
globalDataStore.set(store); | ||
} | ||
const contentConfig = globalContentConfigObserver.get(); | ||
if (contentConfig?.status !== 'loaded') { | ||
logger.debug('Content config not loaded, skipping sync'); | ||
return; | ||
} | ||
await Promise.all( | ||
Object.entries(contentConfig.config.collections).map(async ([name, collection]) => { | ||
if (collection.type !== 'experimental_data') { | ||
return; | ||
} | ||
|
||
let { schema } = collection; | ||
|
||
if (!schema) { | ||
schema = collection.loader.schema; | ||
} | ||
|
||
if (typeof schema === 'function') { | ||
schema = await schema({ | ||
image: () => { | ||
throw new Error('Images are currently not supported for experimental data collections'); | ||
}, | ||
}); | ||
} | ||
|
||
const collectionWithResolvedSchema = { ...collection, schema }; | ||
|
||
function parseData<T extends Record<string, unknown> = Record<string, unknown>>({ | ||
id, | ||
data, | ||
filePath = '', | ||
}: { id: string; data: T; filePath?: string }): T { | ||
return getEntryData( | ||
{ | ||
id, | ||
collection: name, | ||
unvalidatedData: data, | ||
_internal: { | ||
rawData: undefined, | ||
filePath, | ||
}, | ||
}, | ||
collectionWithResolvedSchema, | ||
false | ||
) as unknown as T; | ||
} | ||
|
||
return collection.loader.load({ | ||
collection: name, | ||
store: store.scopedStore(name), | ||
meta: store.metaStore(name), | ||
logger, | ||
settings, | ||
parseData, | ||
}); | ||
}) | ||
); | ||
const cacheFile = new URL(DATA_STORE_FILE, settings.config.cacheDir); | ||
if (!existsSync(settings.config.cacheDir)) { | ||
await fs.mkdir(settings.config.cacheDir, { recursive: true }); | ||
} | ||
await store.writeToDisk(cacheFile); | ||
logger.info('Synced content'); | ||
} |
Oops, something went wrong.