diff --git a/.eslintrc b/.eslintrc index 0c768f01c..407d6e430 100644 --- a/.eslintrc +++ b/.eslintrc @@ -18,7 +18,11 @@ "@typescript-eslint/no-var-requires": "off", "@typescript-eslint/no-explicit-any": "off", "@typescript-eslint/no-non-null-assertion": "off", - "@typescript-eslint/ban-types": "off" + "@typescript-eslint/ban-types": "off", + "@typescript-eslint/no-unused-vars": [ + "error", + { "ignoreRestSiblings": true } + ] }, "ignorePatterns": ["dist", "node_modules", "generated"] } diff --git a/packages/core/package.json b/packages/core/package.json index f12b0ceb6..0b139d17f 100644 --- a/packages/core/package.json +++ b/packages/core/package.json @@ -27,8 +27,11 @@ "typecheck": "tsc --project tsconfig.json --noEmit" }, "dependencies": { + "@apollo/client": "^3.8.3", "@babel/code-frame": "^7.18.6", + "@graphql-tools/schema": "^10.0.0", "@jridgewell/trace-mapping": "^0.3.17", + "apollo-type-bigint": "^0.1.3", "async-mutex": "^0.4.0", "better-sqlite3": "^8.5.0", "cac": "^6.7.14", @@ -45,6 +48,8 @@ "express-graphql": "^0.12.0", "glob": "^8.1.0", "graphql": "^15.3.0", + "graphql-subscriptions": "^2.0.0", + "graphql-ws": "^5.14.0", "http-terminator": "^3.2.0", "ink": "^3.2.0", "kysely": "^0.24.2", @@ -60,9 +65,11 @@ "retry": "^0.13.1", "stacktrace-parser": "^0.1.10", "tsc-alias": "^1.8.2", - "viem": "^1.2.6" + "viem": "^1.2.6", + "ws": "^8.14.1" }, "devDependencies": { + "@graphql-tools/utils": "^10.0.6", "@types/babel__code-frame": "^7.0.3", "@types/better-sqlite3": "^7.6.0", "@types/cors": "^2.8.12", @@ -76,6 +83,7 @@ "@types/react": "^18.0.25", "@types/retry": "^0.12.2", "@types/supertest": "^2.0.12", + "@types/ws": "^8.5.5", "@viem/anvil": "^0.0.6", "abitype": "^0.8.11", "concurrently": "^8.2.0", diff --git a/packages/core/src/Ponder.ts b/packages/core/src/Ponder.ts index f9f94af28..e5787ed77 100644 --- a/packages/core/src/Ponder.ts +++ b/packages/core/src/Ponder.ts @@ -1,3 +1,4 @@ +import assert from "node:assert"; import path from "node:path"; import process 
from "node:process"; @@ -8,13 +9,16 @@ import { buildContracts } from "@/config/contracts"; import { buildDatabase } from "@/config/database"; import { type LogFilter, buildLogFilters } from "@/config/logFilters"; import { type Network, buildNetwork } from "@/config/networks"; -import { type Options } from "@/config/options"; +import { type Options, AppMode } from "@/config/options"; import { UserErrorService } from "@/errors/service"; +import { GqlEventAggregatorService } from "@/event-aggregator/gql-service"; +import { InternalEventAggregatorService } from "@/event-aggregator/internal-service"; import { EventAggregatorService } from "@/event-aggregator/service"; import { PostgresEventStore } from "@/event-store/postgres/store"; import { SqliteEventStore } from "@/event-store/sqlite/store"; import { type EventStore } from "@/event-store/store"; import { HistoricalSyncService } from "@/historical-sync/service"; +import { IndexingServerService } from "@/indexing-server/service"; import { LoggerService } from "@/logs/service"; import { MetricsService } from "@/metrics/service"; import { RealtimeSyncService } from "@/realtime-sync/service"; @@ -25,6 +29,7 @@ import { EventHandlerService } from "@/user-handlers/service"; import { PostgresUserStore } from "@/user-store/postgres/store"; import { SqliteUserStore } from "@/user-store/sqlite/store"; import { type UserStore } from "@/user-store/store"; +import { createGqlClient } from "@/utils/graphql-client"; export type Common = { options: Options; @@ -53,6 +58,7 @@ export class Ponder { eventHandlerService: EventHandlerService; serverService: ServerService; + indexingServerService: IndexingServerService; buildService: BuildService; codegenService: CodegenService; uiService: UiService; @@ -103,13 +109,13 @@ export class Ponder { this.eventStore = eventStore ?? (database.kind === "sqlite" - ? new SqliteEventStore({ db: database.db }) + ? 
new SqliteEventStore({ db: database.eventStoreDb }) : new PostgresEventStore({ pool: database.pool })); this.userStore = userStore ?? (database.kind === "sqlite" - ? new SqliteUserStore({ db: database.db }) + ? new SqliteUserStore({ db: database.userStoreDb }) : new PostgresUserStore({ pool: database.pool })); networks.forEach((network) => { @@ -134,13 +140,28 @@ export class Ponder { }); }); - this.eventAggregatorService = new EventAggregatorService({ + this.indexingServerService = new IndexingServerService({ common, eventStore: this.eventStore, networks, - logFilters, }); + const gqlClient = createGqlClient(this.common.options.indexerGqlEndpoint); + + this.eventAggregatorService = this.checkAppMode(AppMode.Watcher) + ? new GqlEventAggregatorService({ + common, + gqlClient, + networks, + logFilters, + }) + : new InternalEventAggregatorService({ + common, + eventStore: this.eventStore, + networks, + logFilters, + }); + this.eventHandlerService = new EventHandlerService({ common, eventStore: this.eventStore, @@ -174,38 +195,61 @@ export class Ponder { this.registerServiceDependencies(); - // If any of the provided networks do not have a valid RPC url, - // kill the app here. This happens here rather than in the constructor because - // `ponder codegen` should still be able to if an RPC url is missing. In fact, - // that is part of the happy path for `create-ponder`. - const networksMissingRpcUrl: Network[] = []; - this.networkSyncServices.forEach(({ network }) => { - if (!network.rpcUrl) { - networksMissingRpcUrl.push(network); + // Setup indexer services if mode is standalone or indexer + if ( + this.checkAppMode(AppMode.Standalone) || + this.checkAppMode(AppMode.Indexer) + ) { + // If any of the provided networks do not have a valid RPC url, + // kill the app here. This happens here rather than in the constructor because + // `ponder codegen` should still be able to if an RPC url is missing. In fact, + // that is part of the happy path for `create-ponder`. 
+ const networksMissingRpcUrl: Network[] = []; + this.networkSyncServices.forEach(({ network }) => { + if (!network.rpcUrl) { + networksMissingRpcUrl.push(network); + } + }); + if (networksMissingRpcUrl.length > 0) { + return new Error( + `missing RPC URL for networks (${networksMissingRpcUrl.map( + (n) => `"${n.name}"` + )}). Did you forget to add an RPC URL in .env.local?` + ); } - }); - if (networksMissingRpcUrl.length > 0) { - return new Error( - `missing RPC URL for networks (${networksMissingRpcUrl.map( - (n) => `"${n.name}"` - )}). Did you forget to add an RPC URL in .env.local?` - ); + + // Start indexing server if running in indexer mode + if (this.checkAppMode(AppMode.Indexer)) { + await this.indexingServerService.start(); + } + + // Note that this must occur before loadSchema and loadHandlers. + await this.eventStore.migrateUp(); } - // Start the HTTP server. - await this.serverService.start(); + // Setup watcher services if mode is standalone or watcher + if ( + this.checkAppMode(AppMode.Standalone) || + this.checkAppMode(AppMode.Watcher) + ) { + // Subscribe to Sync service events from indexing server if running in watcher mode + if (this.checkAppMode(AppMode.Watcher)) { + assert(this.eventAggregatorService.subscribeToSyncEvents); + await this.eventAggregatorService.subscribeToSyncEvents(); + } - // These files depend only on ponder.config.ts, so can generate once on setup. - // Note that loadHandlers depends on the index.ts file being present. - this.codegenService.generateAppFile(); + // Start the HTTP server. + await this.serverService.start(); - // Note that this must occur before loadSchema and loadHandlers. - await this.eventStore.migrateUp(); + // These files depend only on ponder.config.ts, so can generate once on setup. + // Note that loadHandlers depends on the index.ts file being present. + this.codegenService.generateAppFile(); - // Manually trigger loading schema and handlers. 
Subsequent loads - // are triggered by changes to project files (handled in BuildService). - this.buildService.buildSchema(); - await this.buildService.buildHandlers(); + // Manually trigger loading schema and handlers. Subsequent loads + // are triggered by changes to project files (handled in BuildService). + this.buildService.buildSchema(); + await this.buildService.buildHandlers(); + } return undefined; } @@ -232,17 +276,23 @@ export class Ponder { return await this.kill(); } - await Promise.all( - this.networkSyncServices.map( - async ({ historicalSyncService, realtimeSyncService }) => { - const blockNumbers = await realtimeSyncService.setup(); - await historicalSyncService.setup(blockNumbers); - - historicalSyncService.start(); - realtimeSyncService.start(); - } - ) - ); + // Start sync services if running in standalone or indexer mode + if ( + this.checkAppMode(AppMode.Standalone) || + this.checkAppMode(AppMode.Indexer) + ) { + await Promise.all( + this.networkSyncServices.map( + async ({ historicalSyncService, realtimeSyncService }) => { + const blockNumbers = await realtimeSyncService.setup(); + await historicalSyncService.setup(blockNumbers); + + historicalSyncService.start(); + realtimeSyncService.start(); + } + ) + ); + } this.buildService.watch(); } @@ -269,17 +319,23 @@ export class Ponder { return await this.kill(); } - await Promise.all( - this.networkSyncServices.map( - async ({ historicalSyncService, realtimeSyncService }) => { - const blockNumbers = await realtimeSyncService.setup(); - await historicalSyncService.setup(blockNumbers); - - historicalSyncService.start(); - realtimeSyncService.start(); - } - ) - ); + // Start sync services if running in standalone or indexer mode + if ( + this.checkAppMode(AppMode.Standalone) || + this.checkAppMode(AppMode.Indexer) + ) { + await Promise.all( + this.networkSyncServices.map( + async ({ historicalSyncService, realtimeSyncService }) => { + const blockNumbers = await realtimeSyncService.setup(); + await 
historicalSyncService.setup(blockNumbers); + + historicalSyncService.start(); + realtimeSyncService.start(); + } + ) + ); + } } async codegen() { @@ -296,7 +352,7 @@ export class Ponder { } async kill() { - this.eventAggregatorService.clearListeners(); + this.eventAggregatorService.kill(); this.common.telemetry.record({ event: "App Killed", @@ -336,81 +392,144 @@ export class Ponder { await this.kill(); }); - this.buildService.on("newSchema", async ({ schema, graphqlSchema }) => { - this.codegenService.generateAppFile({ schema }); - this.codegenService.generateSchemaFile({ graphqlSchema }); - - this.serverService.reload({ graphqlSchema }); + // Register build service listeners if running in standalone or watcher mode + if ( + this.checkAppMode(AppMode.Standalone) || + this.checkAppMode(AppMode.Watcher) + ) { + this.buildService.on("newSchema", async ({ schema, graphqlSchema }) => { + this.codegenService.generateAppFile({ schema }); + this.codegenService.generateSchemaFile({ graphqlSchema }); - await this.eventHandlerService.reset({ schema }); - await this.eventHandlerService.processEvents(); - }); + this.serverService.reload({ graphqlSchema }); - this.buildService.on("newHandlers", async ({ handlers }) => { - await this.eventHandlerService.reset({ handlers }); - await this.eventHandlerService.processEvents(); - }); + await this.eventHandlerService.reset({ schema }); + await this.eventHandlerService.processEvents(); + }); - this.networkSyncServices.forEach((networkSyncService) => { - const { chainId } = networkSyncService.network; - const { historicalSyncService, realtimeSyncService } = networkSyncService; + this.buildService.on("newHandlers", async ({ handlers }) => { + await this.eventHandlerService.reset({ handlers }); + await this.eventHandlerService.processEvents(); + }); + } - historicalSyncService.on("historicalCheckpoint", ({ timestamp }) => { - this.eventAggregatorService.handleNewHistoricalCheckpoint({ - chainId, - timestamp, + // Register network service 
listeners if running in standalone or indexer mode + if ( + this.checkAppMode(AppMode.Standalone) || + this.checkAppMode(AppMode.Indexer) + ) { + this.networkSyncServices.forEach((networkSyncService) => { + const { chainId } = networkSyncService.network; + const { historicalSyncService, realtimeSyncService } = + networkSyncService; + + historicalSyncService.on("historicalCheckpoint", ({ timestamp }) => { + if (this.checkAppMode(AppMode.Indexer)) { + this.indexingServerService.handleNewHistoricalCheckpoint({ + chainId, + timestamp, + }); + } else { + this.eventAggregatorService.handleNewHistoricalCheckpoint({ + chainId, + timestamp, + }); + } }); - }); - historicalSyncService.on("syncComplete", () => { - this.eventAggregatorService.handleHistoricalSyncComplete({ - chainId, + historicalSyncService.on("syncComplete", () => { + if (this.checkAppMode(AppMode.Indexer)) { + this.indexingServerService.handleHistoricalSyncComplete({ + chainId, + }); + } else { + this.eventAggregatorService.handleHistoricalSyncComplete({ + chainId, + }); + } }); - }); - realtimeSyncService.on("realtimeCheckpoint", ({ timestamp }) => { - this.eventAggregatorService.handleNewRealtimeCheckpoint({ - chainId, - timestamp, + realtimeSyncService.on("realtimeCheckpoint", ({ timestamp }) => { + if (this.checkAppMode(AppMode.Indexer)) { + this.indexingServerService.handleNewRealtimeCheckpoint({ + chainId, + timestamp, + }); + } else { + this.eventAggregatorService.handleNewRealtimeCheckpoint({ + chainId, + timestamp, + }); + } }); - }); - realtimeSyncService.on("finalityCheckpoint", ({ timestamp }) => { - this.eventAggregatorService.handleNewFinalityCheckpoint({ - chainId, - timestamp, + realtimeSyncService.on("finalityCheckpoint", ({ timestamp }) => { + if (this.checkAppMode(AppMode.Indexer)) { + this.indexingServerService.handleNewFinalityCheckpoint({ + chainId, + timestamp, + }); + } else { + this.eventAggregatorService.handleNewFinalityCheckpoint({ + chainId, + timestamp, + }); + } }); + + 
realtimeSyncService.on( + "shallowReorg", + ({ commonAncestorTimestamp }) => { + if (this.checkAppMode(AppMode.Indexer)) { + this.indexingServerService.handleReorg({ + commonAncestorTimestamp, + }); + } else { + this.eventAggregatorService.handleReorg({ + commonAncestorTimestamp, + }); + } + } + ); }); + } - realtimeSyncService.on("shallowReorg", ({ commonAncestorTimestamp }) => { - this.eventAggregatorService.handleReorg({ commonAncestorTimestamp }); + // Register event aggregator and handler service listeners if running in standalone or watcher mode + if ( + this.checkAppMode(AppMode.Standalone) || + this.checkAppMode(AppMode.Watcher) + ) { + this.eventAggregatorService.on("newCheckpoint", async () => { + await this.eventHandlerService.processEvents(); }); - }); - this.eventAggregatorService.on("newCheckpoint", async () => { - await this.eventHandlerService.processEvents(); - }); + this.eventAggregatorService.on( + "reorg", + async ({ commonAncestorTimestamp }) => { + await this.eventHandlerService.handleReorg({ + commonAncestorTimestamp, + }); + await this.eventHandlerService.processEvents(); + } + ); - this.eventAggregatorService.on( - "reorg", - async ({ commonAncestorTimestamp }) => { - await this.eventHandlerService.handleReorg({ commonAncestorTimestamp }); - await this.eventHandlerService.processEvents(); - } - ); + this.eventHandlerService.on("eventsProcessed", ({ toTimestamp }) => { + if (this.serverService.isHistoricalEventProcessingComplete) return; + + // If a batch of events are processed AND the historical sync is complete AND + // the new toTimestamp is greater than the historical sync completion timestamp, + // historical event processing is complete, and the server should begin responding as healthy. 
+ if ( + this.eventAggregatorService.historicalSyncCompletedAt && + toTimestamp >= this.eventAggregatorService.historicalSyncCompletedAt + ) { + this.serverService.setIsHistoricalEventProcessingComplete(); + } + }); + } + } - this.eventHandlerService.on("eventsProcessed", ({ toTimestamp }) => { - if (this.serverService.isHistoricalEventProcessingComplete) return; - - // If a batch of events are processed AND the historical sync is complete AND - // the new toTimestamp is greater than the historical sync completion timestamp, - // historical event processing is complete, and the server should begin responding as healthy. - if ( - this.eventAggregatorService.historicalSyncCompletedAt && - toTimestamp >= this.eventAggregatorService.historicalSyncCompletedAt - ) { - this.serverService.setIsHistoricalEventProcessingComplete(); - } - }); + private checkAppMode(mode: AppMode) { + return this.common.options.mode === mode; } } diff --git a/packages/core/src/bin/ponder.ts b/packages/core/src/bin/ponder.ts index fa266c198..0cbfc1f1c 100644 --- a/packages/core/src/bin/ponder.ts +++ b/packages/core/src/bin/ponder.ts @@ -7,7 +7,7 @@ import dotenv from "dotenv"; import path from "node:path"; import { buildConfig } from "@/config/config"; -import { buildOptions } from "@/config/options"; +import { AppMode, buildOptions } from "@/config/options"; import { Ponder } from "@/Ponder"; // NOTE: This is a workaround for tsconfig `rootDir` nonsense. 
@@ -26,12 +26,16 @@ const cli = cac("ponder") }) .option("--root-dir [path]", `Path to project root directory`, { default: ".", + }) + .option("--mode [mode]", `Mode of the Ponder app`, { + default: AppMode.Standalone, }); export type CliOptions = { help?: boolean; configFile: string; rootDir: string; + mode: AppMode; }; cli diff --git a/packages/core/src/config/config.ts b/packages/core/src/config/config.ts index 118528db4..746a05653 100644 --- a/packages/core/src/config/config.ts +++ b/packages/core/src/config/config.ts @@ -10,8 +10,8 @@ export type ResolvedConfig = { database?: | { kind: "sqlite"; - /** Path to SQLite database file. Default: `"./.ponder/cache.db"`. */ - filename?: string; + /** Path to SQLite database directory. Default: `"./.ponder/cache"`. */ + directory?: string; } | { kind: "postgres"; @@ -85,6 +85,8 @@ export type ResolvedConfig = { options?: { /** Maximum number of seconds to wait for event processing to be complete before responding as healthy. If event processing exceeds this duration, the API may serve incomplete data. Default: `240` (4 minutes). 
*/ maxHealthcheckDuration?: number; + /** GQL endpoint of the indexer, required when running app in watcher mode */ + indexerGqlEndpoint?: string; }; }; diff --git a/packages/core/src/config/database.ts b/packages/core/src/config/database.ts index ab089c4e0..7b0f6c974 100644 --- a/packages/core/src/config/database.ts +++ b/packages/core/src/config/database.ts @@ -1,4 +1,5 @@ import Sqlite from "better-sqlite3"; +import assert from "node:assert"; import path from "node:path"; import pg, { Client, DatabaseError, Pool } from "pg"; @@ -10,7 +11,8 @@ import { ensureDirExists } from "@/utils/exists"; export interface SqliteDb { kind: "sqlite"; - db: Sqlite.Database; + eventStoreDb: Sqlite.Database; + userStoreDb: Sqlite.Database; } export interface PostgresDb { @@ -94,7 +96,7 @@ export const buildDatabase = ({ }): Database => { let resolvedDatabaseConfig: NonNullable; - const defaultSqliteFilename = path.join(options.ponderDir, "cache.db"); + const defaultSqliteDirectory = path.join(options.ponderDir, "cache"); if (config.database) { if (config.database.kind === "postgres") { @@ -105,7 +107,7 @@ export const buildDatabase = ({ } else { resolvedDatabaseConfig = { kind: "sqlite", - filename: config.database.filename ?? defaultSqliteFilename, + directory: config.database.directory ?? 
defaultSqliteDirectory, }; } } else { @@ -117,19 +119,27 @@ export const buildDatabase = ({ } else { resolvedDatabaseConfig = { kind: "sqlite", - filename: defaultSqliteFilename, + directory: defaultSqliteDirectory, }; } } if (resolvedDatabaseConfig.kind === "sqlite") { - ensureDirExists(resolvedDatabaseConfig.filename!); - const rawDb = Sqlite(resolvedDatabaseConfig.filename!); - rawDb.pragma("journal_mode = WAL"); - - const db = patchSqliteDatabase({ db: rawDb }); - - return { kind: "sqlite", db }; + const [eventStoreDb, userStoreDb] = ["event-store.db", "user-store.db"].map( + (filename) => { + assert(resolvedDatabaseConfig.kind === "sqlite"); + const dbFilePath = path.join( + resolvedDatabaseConfig.directory!, + filename + ); + ensureDirExists(dbFilePath); + const rawDb = Sqlite(dbFilePath); + rawDb.pragma("journal_mode = WAL"); + return patchSqliteDatabase({ db: rawDb }); + } + ); + + return { kind: "sqlite", eventStoreDb, userStoreDb }; } else { const pool = new Pool({ connectionString: resolvedDatabaseConfig.connectionString, diff --git a/packages/core/src/config/options.ts b/packages/core/src/config/options.ts index d67114a91..ac0a62f16 100644 --- a/packages/core/src/config/options.ts +++ b/packages/core/src/config/options.ts @@ -5,6 +5,12 @@ import type { CliOptions } from "@/bin/ponder"; import type { ResolvedConfig } from "./config"; +export enum AppMode { + Standalone = "standalone", + Indexer = "indexer", + Watcher = "watcher", +} + export type Options = { configFile: string; schemaFile: string; @@ -15,6 +21,7 @@ export type Options = { logDir: string; port: number; + indexingPort: number; maxHealthcheckDuration: number; telemetryUrl: string; telemetryDisabled: boolean; @@ -22,6 +29,9 @@ export type Options = { logLevel: LevelWithSilent; uiEnabled: boolean; + + mode: AppMode; + indexerGqlEndpoint: string; }; export const buildOptions = ({ @@ -54,9 +64,14 @@ export const buildOptions = ({ logDir: ".ponder/logs", port: Number(process.env.PORT ?? 
42069), + indexingPort: Number(process.env.INDEXING_PORT ?? 42070), maxHealthcheckDuration: configOptions?.maxHealthcheckDuration ?? railwayHealthcheckTimeout ?? 240, + mode: cliOptions.mode, + indexerGqlEndpoint: + configOptions?.indexerGqlEndpoint ?? "http://localhost:42070/graphql", + telemetryUrl: "https://ponder.sh/api/telemetry", telemetryDisabled: Boolean(process.env.PONDER_TELEMETRY_DISABLED), telemetryIsExampleProject: Boolean( diff --git a/packages/core/src/event-aggregator/gql-service.ts b/packages/core/src/event-aggregator/gql-service.ts new file mode 100644 index 000000000..1f1a8d73c --- /dev/null +++ b/packages/core/src/event-aggregator/gql-service.ts @@ -0,0 +1,422 @@ +import { + type ApolloClient, + type DocumentNode, + type NormalizedCacheObject, + type ObservableSubscription, + gql, +} from "@apollo/client"; +import { type Hex, Address } from "viem"; + +import type { LogFilterName } from "@/build/handlers"; +import type { LogEventMetadata, LogFilter } from "@/config/logFilters"; +import type { Network } from "@/config/networks"; +import type { Common } from "@/Ponder"; +import type { Block } from "@/types/block"; +import type { Log } from "@/types/log"; +import type { Transaction } from "@/types/transaction"; + +import { EventAggregatorService } from "./service"; + +type Cursor = { + timestamp: number; + chainId: number; + blockNumber: number; + logIndex: number; +}; + +export class GqlEventAggregatorService extends EventAggregatorService { + private gqlClient: ApolloClient; + private subscriptions: ObservableSubscription[] = []; + + constructor({ + common, + gqlClient, + networks, + logFilters, + }: { + common: Common; + gqlClient: ApolloClient; + networks: Network[]; + logFilters: LogFilter[]; + }) { + super({ + common, + networks, + logFilters, + }); + + this.metrics = {}; + + this.gqlClient = gqlClient; + } + + /** Fetches events for all registered log filters between the specified timestamps. 
+ * + * @param options.fromTimestamp Timestamp to start including events (inclusive). + * @param options.toTimestamp Timestamp to stop including events (inclusive). + * @param options.includeLogFilterEvents Map of log filter name -> selector -> ABI event item for which to include full event objects. + * @returns A promise resolving to an array of log events. + */ + async *getEvents({ + fromTimestamp, + toTimestamp, + includeLogFilterEvents, + }: { + fromTimestamp: number; + toTimestamp: number; + includeLogFilterEvents: { + [logFilterName: LogFilterName]: + | { + bySelector: { [selector: Hex]: LogEventMetadata }; + } + | undefined; + }; + }) { + let cursor: Cursor | undefined; + + while (true) { + const { events, metadata } = await this.getLogEvents({ + fromTimestamp, + toTimestamp, + filters: this.logFilters.map((logFilter) => ({ + name: logFilter.name, + chainId: logFilter.filter.chainId, + address: logFilter.filter.address, + topics: logFilter.filter.topics, + fromBlock: logFilter.filter.startBlock, + toBlock: logFilter.filter.endBlock, + includeEventSelectors: Object.keys( + includeLogFilterEvents[logFilter.name]?.bySelector ?? 
{} + ) as Hex[], + })), + cursor, + }); + + // Set cursor to fetch next batch of events from indexingClient GQL query + cursor = metadata.cursor; + + const decodedEvents = this.decodeEvents(events, includeLogFilterEvents); + + yield { events: decodedEvents, metadata }; + + if (metadata.isLastPage) break; + } + } + + override async subscribeToSyncEvents() { + this.subscriptions = [ + this.subscribeGql( + gql` + subscription { + onNewHistoricalCheckpoint { + chainId + timestamp + } + } + `, + ({ data }) => { + this.handleNewHistoricalCheckpoint(data.onNewHistoricalCheckpoint); + } + ), + this.subscribeGql( + gql` + subscription { + onHistoricalSyncComplete { + chainId + } + } + `, + ({ data }) => { + this.handleHistoricalSyncComplete(data.onHistoricalSyncComplete); + } + ), + this.subscribeGql( + gql` + subscription { + onNewRealtimeCheckpoint { + chainId + timestamp + } + } + `, + ({ data }) => { + this.handleNewRealtimeCheckpoint(data.onNewRealtimeCheckpoint); + } + ), + this.subscribeGql( + gql` + subscription { + onNewFinalityCheckpoint { + chainId + timestamp + } + } + `, + ({ data }) => { + this.handleNewFinalityCheckpoint(data.onNewFinalityCheckpoint); + } + ), + this.subscribeGql( + gql` + subscription { + onReorg { + commonAncestorTimestamp + } + } + `, + ({ data }) => { + this.handleReorg(data.onReorg); + } + ), + ]; + + await this.fetchHistoricalSync(); + } + + override kill() { + this.subscriptions.forEach((subscription) => subscription.unsubscribe()); + this.clearListeners(); + } + + private getLogEvents = async (variables: { + fromTimestamp: number; + toTimestamp: number; + filters?: { + name: string; + chainId: number; + address?: Address | Address[]; + topics?: (Hex | Hex[] | null)[]; + fromBlock?: number; + toBlock?: number; + includeEventSelectors?: Hex[]; + }[]; + cursor?: Cursor; + }) => { + // Sanitize filter values for GQL query + if (variables.filters) { + variables.filters.forEach((filter) => { + if (filter.address && 
!Array.isArray(filter.address)) { + filter.address = [filter.address]; + } + + if (filter.topics) { + filter.topics = filter.topics.map((topic) => + topic && !Array.isArray(topic) ? [topic] : topic + ); + } + }); + } + + const { gql } = await import("@apollo/client/core"); + + const { + data: { getLogEvents }, + } = await this.gqlClient.query({ + query: gql` + query getLogEvents( + $fromTimestamp: Int! + $toTimestamp: Int! + $filters: [Filter!] + $cursor: CursorInput + ) { + getLogEvents( + fromTimestamp: $fromTimestamp + toTimestamp: $toTimestamp + filters: $filters + cursor: $cursor + ) { + events { + logFilterName + log { + id + address + blockHash + blockNumber + data + logIndex + removed + topics + transactionHash + transactionIndex + } + block { + baseFeePerGas + difficulty + extraData + gasLimit + gasUsed + hash + logsBloom + miner + mixHash + nonce + number + parentHash + receiptsRoot + sha3Uncles + size + stateRoot + timestamp + totalDifficulty + transactionsRoot + } + transaction { + blockHash + blockNumber + from + gas + hash + input + nonce + r + s + to + transactionIndex + v + value + type + gasPrice + accessList { + address + storageKeys + } + maxFeePerGas + maxPriorityFeePerGas + } + } + metadata { + pageEndsAtTimestamp + counts { + logFilterName + selector + count + } + cursor { + timestamp + chainId + blockNumber + logIndex + } + isLastPage + } + } + } + `, + variables, + }); + + // Remove __typename from GQL query result + const { + __typename, + cursor: gqlCursor, + ...metadata + } = getLogEvents.metadata; + let cursor: Cursor | undefined; + + if (gqlCursor) { + const { __typename, ...cursorData } = gqlCursor; + cursor = cursorData; + } + + return { + events: getLogEvents.events.map((event: any) => ({ + ...event, + log: { + ...event.log, + blockNumber: BigInt(event.log.blockNumber), + }, + block: { + ...event.block, + baseFeePerGas: + event.block.baseFeePerGas && BigInt(event.block.baseFeePerGas), + difficulty: BigInt(event.block.difficulty), + 
gasLimit: BigInt(event.block.gasLimit), + gasUsed: BigInt(event.block.gasUsed), + number: BigInt(event.block.number), + size: BigInt(event.block.size), + timestamp: BigInt(event.block.timestamp), + totalDifficulty: BigInt(event.block.totalDifficulty), + }, + transaction: { + ...event.transaction, + blockNumber: BigInt(event.transaction.blockNumber), + gas: BigInt(event.transaction.gas), + v: BigInt(event.transaction.v), + value: BigInt(event.transaction.value), + gasPrice: + event.transaction.gasPrice && BigInt(event.transaction.gasPrice), + maxFeePerGas: + event.transaction.maxFeePerGas && + BigInt(event.transaction.maxFeePerGas), + maxPriorityFeePerGas: + event.transaction.maxPriorityFeePerGas && + BigInt(event.transaction.maxPriorityFeePerGas), + }, + })), + metadata: { + ...metadata, + cursor, + }, + } as { + events: { + logFilterName: string; + log: Log; + block: Block; + transaction: Transaction; + }[]; + metadata: { + pageEndsAtTimestamp: number; + counts: any[]; + cursor?: Cursor; + isLastPage: boolean; + }; + }; + }; + + private subscribeGql(query: DocumentNode, onNext: (value: any) => void) { + const observable = this.gqlClient.subscribe({ query }); + + return observable.subscribe({ + next(data) { + onNext(data); + }, + }); + } + + private async fetchHistoricalSync() { + const { gql } = await import("@apollo/client/core"); + + const queryPromises = Object.keys(this.networkCheckpoints).map( + async (chainId) => { + const { + data: { getNetworkHistoricalSync }, + } = await this.gqlClient.query({ + query: gql` + query getNetworkHistoricalSync($chainId: Int!) 
{ + getNetworkHistoricalSync(chainId: $chainId) { + checkpoint + isSyncComplete + } + } + `, + variables: { chainId: Number(chainId) }, + }); + + const { checkpoint, isSyncComplete } = getNetworkHistoricalSync; + + if (checkpoint) { + this.handleNewHistoricalCheckpoint({ + chainId: Number(chainId), + timestamp: checkpoint, + }); + } + + if (isSyncComplete) { + this.handleHistoricalSyncComplete({ chainId: Number(chainId) }); + } + } + ); + + await Promise.all(queryPromises); + } +} diff --git a/packages/core/src/event-aggregator/service.test.ts b/packages/core/src/event-aggregator/internal-service.test.ts similarity index 93% rename from packages/core/src/event-aggregator/service.test.ts rename to packages/core/src/event-aggregator/internal-service.test.ts index b8a8112cf..d67c263cc 100644 --- a/packages/core/src/event-aggregator/service.test.ts +++ b/packages/core/src/event-aggregator/internal-service.test.ts @@ -7,7 +7,7 @@ import { encodeLogFilterKey } from "@/config/logFilterKey"; import type { LogFilter } from "@/config/logFilters"; import type { Network } from "@/config/networks"; -import { EventAggregatorService } from "./service"; +import { InternalEventAggregatorService } from "./internal-service"; beforeEach((context) => setupEventStore(context)); @@ -61,7 +61,7 @@ const logFilters: LogFilter[] = [ test("handleNewHistoricalCheckpoint emits new checkpoint", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, @@ -84,7 +84,7 @@ test("handleNewHistoricalCheckpoint emits new checkpoint", async (context) => { test("handleNewHistoricalCheckpoint does not emit new checkpoint if not best", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, @@ -114,7 +114,7 @@ 
test("handleNewHistoricalCheckpoint does not emit new checkpoint if not best", a test("handleHistoricalSyncComplete sets historicalSyncCompletedAt if final historical sync is complete", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, @@ -142,7 +142,7 @@ test("handleHistoricalSyncComplete sets historicalSyncCompletedAt if final histo test("handleNewRealtimeCheckpoint does not emit new checkpoint if historical sync is not complete", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, @@ -171,7 +171,7 @@ test("handleNewRealtimeCheckpoint does not emit new checkpoint if historical syn test("handleNewRealtimeCheckpoint emits new checkpoint if historical sync is complete", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, @@ -209,7 +209,7 @@ test("handleNewRealtimeCheckpoint emits new checkpoint if historical sync is com test("handleNewFinalityCheckpoint emits newFinalityCheckpoint", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, @@ -235,7 +235,7 @@ test("handleNewFinalityCheckpoint emits newFinalityCheckpoint", async (context) test("handleNewFinalityCheckpoint does not emit newFinalityCheckpoint if subsequent event is earlier", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, @@ -265,7 +265,7 @@ test("handleNewFinalityCheckpoint does not emit 
newFinalityCheckpoint if subsequ test("handleNewFinalityCheckpoint emits newFinalityCheckpoint if subsequent event is later", async (context) => { const { common, eventStore } = context; - const service = new EventAggregatorService({ + const service = new InternalEventAggregatorService({ common, eventStore, logFilters, diff --git a/packages/core/src/event-aggregator/internal-service.ts b/packages/core/src/event-aggregator/internal-service.ts new file mode 100644 index 000000000..1fe84434a --- /dev/null +++ b/packages/core/src/event-aggregator/internal-service.ts @@ -0,0 +1,80 @@ +import { type Hex } from "viem"; + +import type { LogFilterName } from "@/build/handlers"; +import type { LogEventMetadata, LogFilter } from "@/config/logFilters"; +import type { Network } from "@/config/networks"; +import type { EventStore } from "@/event-store/store"; +import type { Common } from "@/Ponder"; + +import { EventAggregatorService } from "./service"; + +export class InternalEventAggregatorService extends EventAggregatorService { + private eventStore: EventStore; + + constructor({ + common, + eventStore, + networks, + logFilters, + }: { + common: Common; + eventStore: EventStore; + networks: Network[]; + logFilters: LogFilter[]; + }) { + super({ + common, + networks, + logFilters, + }); + + this.eventStore = eventStore; + } + + /** Fetches events for all registered log filters between the specified timestamps. + * + * @param options.fromTimestamp Timestamp to start including events (inclusive). + * @param options.toTimestamp Timestamp to stop including events (inclusive). + * @param options.includeLogFilterEvents Map of log filter name -> selector -> ABI event item for which to include full event objects. + * @returns A promise resolving to an array of log events. 
+ */ + async *getEvents({ + fromTimestamp, + toTimestamp, + includeLogFilterEvents, + }: { + fromTimestamp: number; + toTimestamp: number; + includeLogFilterEvents: { + [logFilterName: LogFilterName]: + | { + bySelector: { [selector: Hex]: LogEventMetadata }; + } + | undefined; + }; + }) { + const iterator = this.eventStore.getLogEvents({ + fromTimestamp, + toTimestamp, + filters: this.logFilters.map((logFilter) => ({ + name: logFilter.name, + chainId: logFilter.filter.chainId, + address: logFilter.filter.address, + topics: logFilter.filter.topics, + fromBlock: logFilter.filter.startBlock, + toBlock: logFilter.filter.endBlock, + includeEventSelectors: Object.keys( + includeLogFilterEvents[logFilter.name]?.bySelector ?? {} + ) as Hex[], + })), + }); + + for await (const page of iterator) { + const { events, metadata } = page; + + const decodedEvents = this.decodeEvents(events, includeLogFilterEvents); + + yield { events: decodedEvents, metadata }; + } + } +} diff --git a/packages/core/src/event-aggregator/service.ts b/packages/core/src/event-aggregator/service.ts index ce25d1e0f..5c9f99795 100644 --- a/packages/core/src/event-aggregator/service.ts +++ b/packages/core/src/event-aggregator/service.ts @@ -4,7 +4,6 @@ import { type Hex, decodeEventLog } from "viem"; import type { LogFilterName } from "@/build/handlers"; import type { LogEventMetadata, LogFilter } from "@/config/logFilters"; import type { Network } from "@/config/networks"; -import type { EventStore } from "@/event-store/store"; import type { Common } from "@/Ponder"; import type { Block } from "@/types/block"; import type { Log } from "@/types/log"; @@ -20,7 +19,7 @@ export type LogEvent = { transaction: Transaction; }; -type EventAggregatorEvents = { +export type EventAggregatorEvents = { /** * Emitted when a new event checkpoint is reached. This is the minimum timestamp * at which events are available across all registered networks. 
@@ -37,12 +36,11 @@ type EventAggregatorEvents = { reorg: { commonAncestorTimestamp: number }; }; -type EventAggregatorMetrics = {}; +export type EventAggregatorMetrics = {}; -export class EventAggregatorService extends Emittery { - private common: Common; - private eventStore: EventStore; - private logFilters: LogFilter[]; +export abstract class EventAggregatorService extends Emittery { + protected common; + protected logFilters: LogFilter[]; private networks: Network[]; // Minimum timestamp at which events are available (across all networks). @@ -54,7 +52,7 @@ export class EventAggregatorService extends Emittery { historicalSyncCompletedAt?: number; // Per-network event timestamp checkpoints. - private networkCheckpoints: Record< + protected networkCheckpoints: Record< number, { isHistoricalSyncComplete: boolean; @@ -68,19 +66,16 @@ export class EventAggregatorService extends Emittery { constructor({ common, - eventStore, networks, logFilters, }: { common: Common; - eventStore: EventStore; networks: Network[]; logFilters: LogFilter[]; }) { super(); this.common = common; - this.eventStore = eventStore; this.logFilters = logFilters; this.networks = networks; this.metrics = {}; @@ -106,7 +101,7 @@ export class EventAggregatorService extends Emittery { * @param options.includeLogFilterEvents Map of log filter name -> selector -> ABI event item for which to include full event objects. * @returns A promise resolving to an array of log events. 
*/ - async *getEvents({ + abstract getEvents({ fromTimestamp, toTimestamp, includeLogFilterEvents, @@ -120,72 +115,22 @@ export class EventAggregatorService extends Emittery { } | undefined; }; - }) { - const iterator = this.eventStore.getLogEvents({ - fromTimestamp, - toTimestamp, - filters: this.logFilters.map((logFilter) => ({ - name: logFilter.name, - chainId: logFilter.filter.chainId, - address: logFilter.filter.address, - topics: logFilter.filter.topics, - fromBlock: logFilter.filter.startBlock, - toBlock: logFilter.filter.endBlock, - includeEventSelectors: Object.keys( - includeLogFilterEvents[logFilter.name]?.bySelector ?? {} - ) as Hex[], - })), - }); + }): AsyncGenerator<{ + events: LogEvent[]; + metadata: { + pageEndsAtTimestamp: number; + counts: { + logFilterName: string; + selector: Hex; + count: number; + }[]; + }; + }>; - for await (const page of iterator) { - const { events, metadata } = page; - - const decodedEvents = events.reduce((acc, event) => { - const selector = event.log.topics[0]; - if (!selector) { - throw new Error( - `Received an event log with no selector: ${event.log}` - ); - } - - const logEventMetadata = - includeLogFilterEvents[event.logFilterName]?.bySelector[selector]; - if (!logEventMetadata) { - throw new Error( - `Metadata for event ${event.logFilterName}:${selector} not found in includeLogFilterEvents` - ); - } - const { abiItem, safeName } = logEventMetadata; - - try { - const decodedLog = decodeEventLog({ - abi: [abiItem], - data: event.log.data, - topics: event.log.topics, - }); - - acc.push({ - logFilterName: event.logFilterName, - eventName: safeName, - params: decodedLog.args || {}, - log: event.log, - block: event.block, - transaction: event.transaction, - }); - } catch (err) { - // TODO: emit a warning here that a log was not decoded. 
- this.common.logger.error({ - service: "app", - msg: `Unable to decode log (skipping it): ${event.log}`, - error: err as Error, - }); - } - - return acc; - }, []); - - yield { events: decodedEvents, metadata }; - } + subscribeToSyncEvents?(): Promise; + + kill() { + this.clearListeners(); } handleNewHistoricalCheckpoint = ({ @@ -264,6 +209,64 @@ export class EventAggregatorService extends Emittery { this.emit("reorg", { commonAncestorTimestamp }); }; + protected decodeEvents = ( + events: { + logFilterName: string; + log: Log; + block: Block; + transaction: Transaction; + }[], + includeLogFilterEvents: { + [logFilterName: LogFilterName]: + | { + bySelector: { [selector: Hex]: LogEventMetadata }; + } + | undefined; + } + ) => { + return events.reduce((acc, event) => { + const selector = event.log.topics[0]; + if (!selector) { + throw new Error(`Received an event log with no selector: ${event.log}`); + } + + const logEventMetadata = + includeLogFilterEvents[event.logFilterName]?.bySelector[selector]; + if (!logEventMetadata) { + throw new Error( + `Metadata for event ${event.logFilterName}:${selector} not found in includeLogFilterEvents` + ); + } + const { abiItem, safeName } = logEventMetadata; + + try { + const decodedLog = decodeEventLog({ + abi: [abiItem], + data: event.log.data, + topics: event.log.topics, + }); + + acc.push({ + logFilterName: event.logFilterName, + eventName: safeName, + params: decodedLog.args || {}, + log: event.log, + block: event.block, + transaction: event.transaction, + }); + } catch (err) { + // TODO: emit a warning here that a log was not decoded. 
+ this.common.logger.error({ + service: "app", + msg: `Unable to decode log (skipping it): ${event.log}`, + error: err as Error, + }); + } + + return acc; + }, []); + }; + private recalculateCheckpoint = () => { const checkpoints = Object.values(this.networkCheckpoints).map((n) => n.isHistoricalSyncComplete diff --git a/packages/core/src/event-store/postgres/store.ts b/packages/core/src/event-store/postgres/store.ts index 4e5344c3e..06a4d258b 100644 --- a/packages/core/src/event-store/postgres/store.ts +++ b/packages/core/src/event-store/postgres/store.ts @@ -19,7 +19,7 @@ import { intToBlob } from "@/utils/encode"; import { mergeIntervals } from "@/utils/intervals"; import { range } from "@/utils/range"; -import type { EventStore } from "../store"; +import type { Cursor, EventStore } from "../store"; import { type EventStoreTables, type InsertableBlock, @@ -417,6 +417,7 @@ export class PostgresEventStore implements EventStore { toTimestamp, filters = [], pageSize = 10_000, + cursor, }: { fromTimestamp: number; toTimestamp: number; @@ -430,6 +431,7 @@ export class PostgresEventStore implements EventStore { includeEventSelectors?: Hex[]; }[]; pageSize: number; + cursor?: Cursor; }) { const baseQuery = this.db .with( @@ -618,15 +620,6 @@ export class PostgresEventStore implements EventStore { count: Number(c.count), })); - let cursor: - | { - timestamp: Buffer; - chainId: number; - blockNumber: Buffer; - logIndex: number; - } - | undefined = undefined; - while (true) { let query = includedLogsBaseQuery.limit(pageSize); if (cursor) { @@ -779,6 +772,7 @@ export class PostgresEventStore implements EventStore { metadata: { pageEndsAtTimestamp, counts: eventCounts, + cursor, }, }; diff --git a/packages/core/src/event-store/sqlite/store.ts b/packages/core/src/event-store/sqlite/store.ts index 908c7de6c..63ecee826 100644 --- a/packages/core/src/event-store/sqlite/store.ts +++ b/packages/core/src/event-store/sqlite/store.ts @@ -18,7 +18,7 @@ import { intToBlob } from 
"@/utils/encode"; import { mergeIntervals } from "@/utils/intervals"; import { range } from "@/utils/range"; -import type { EventStore } from "../store"; +import type { Cursor, EventStore } from "../store"; import { type EventStoreTables, type InsertableBlock, @@ -388,6 +388,7 @@ export class SqliteEventStore implements EventStore { toTimestamp, filters = [], pageSize = 10_000, + cursor, }: { fromTimestamp: number; toTimestamp: number; @@ -401,6 +402,7 @@ export class SqliteEventStore implements EventStore { includeEventSelectors?: Hex[]; }[]; pageSize: number; + cursor?: Cursor; }) { const baseQuery = this.db .with( @@ -596,15 +598,6 @@ export class SqliteEventStore implements EventStore { count: Number(c.count), })); - let cursor: - | { - timestamp: Buffer; - chainId: number; - blockNumber: Buffer; - logIndex: number; - } - | undefined = undefined; - while (true) { let query = includedLogsBaseQuery.limit(pageSize); if (cursor) { @@ -757,6 +750,7 @@ export class SqliteEventStore implements EventStore { metadata: { pageEndsAtTimestamp, counts: eventCounts, + cursor, }, }; diff --git a/packages/core/src/event-store/store.ts b/packages/core/src/event-store/store.ts index 4d62494d8..9457c25f7 100644 --- a/packages/core/src/event-store/store.ts +++ b/packages/core/src/event-store/store.ts @@ -27,6 +27,13 @@ export type ContractReadResult = { result: Hex; }; +export type Cursor = { + timestamp: Buffer; + chainId: number; + blockNumber: Buffer; + logIndex: number; +}; + export interface EventStore { kind: "sqlite" | "postgres"; db: Kysely; @@ -106,6 +113,7 @@ export interface EventStore { includeEventSelectors?: Hex[]; }[]; pageSize?: number; + cursor?: Cursor; }): AsyncGenerator<{ events: { logFilterName: string; @@ -120,6 +128,7 @@ export interface EventStore { selector: Hex; count: number; }[]; + cursor?: Cursor; }; }>; } diff --git a/packages/core/src/indexing-server/resolvers.ts b/packages/core/src/indexing-server/resolvers.ts new file mode 100644 index 
000000000..de21f0ccd --- /dev/null +++ b/packages/core/src/indexing-server/resolvers.ts @@ -0,0 +1,109 @@ +import type { IFieldResolver, IResolvers } from "@graphql-tools/utils"; +import ApolloBigInt from "apollo-type-bigint"; +import { PubSub } from "graphql-subscriptions"; + +import { EventStore } from "@/event-store/store"; +import { blobToBigInt } from "@/utils/decode"; +import { intToBlob } from "@/utils/encode"; + +export const HISTORICAL_CHECKPOINT = "historicalCheckpoint"; +export const SYNC_COMPLETE = "syncComplete"; +export const REALTIME_CHECKPOINT = "realtimeCheckpoint"; +export const FINALITY_CHECKPOINT = "finalityCheckpoint"; +export const SHALLOW_REORG = "shallowReorg"; + +const PAGE_SIZE = 10_000; + +export type NetworkCheckpoints = Record< + number, + { + isHistoricalSyncComplete: boolean; + historicalCheckpoint: number; + } +>; + +export const getResolvers = ({ + eventStore, + pubsub, + networkCheckpoints, +}: { + eventStore: EventStore; + pubsub: PubSub; + networkCheckpoints: NetworkCheckpoints; +}): IResolvers => { + const getLogEvents: IFieldResolver = async (_, args) => { + const { fromTimestamp, toTimestamp, filters, cursor } = args; + + const iterator = eventStore.getLogEvents({ + fromTimestamp, + toTimestamp, + filters, + cursor: cursor && { + ...cursor, + timestamp: intToBlob(cursor.timestamp), + blockNumber: intToBlob(cursor.blockNumber), + }, + pageSize: PAGE_SIZE, + }); + + for await (const page of iterator) { + const { events, metadata } = page; + + return { + events, + metadata: { + ...metadata, + cursor: metadata.cursor && { + ...metadata.cursor, + timestamp: Number(blobToBigInt(metadata.cursor.timestamp)), + blockNumber: Number(blobToBigInt(metadata.cursor.blockNumber)), + }, + isLastPage: events.length < PAGE_SIZE, + }, + }; + } + + throw new Error("getLogEvents iterator should run at least once"); + }; + + const getNetworkHistoricalSync: IFieldResolver = async ( + _, + args + ) => { + const { chainId } = args; + const { 
historicalCheckpoint, isHistoricalSyncComplete } = + networkCheckpoints[chainId]; + + return { + checkpoint: historicalCheckpoint, + isSyncComplete: isHistoricalSyncComplete, + }; + }; + + return { + BigInt: new ApolloBigInt("bigInt"), + + Query: { + getLogEvents, + getNetworkHistoricalSync, + }, + + Subscription: { + onNewHistoricalCheckpoint: { + subscribe: () => pubsub.asyncIterator(HISTORICAL_CHECKPOINT), + }, + onHistoricalSyncComplete: { + subscribe: () => pubsub.asyncIterator(SYNC_COMPLETE), + }, + onNewRealtimeCheckpoint: { + subscribe: () => pubsub.asyncIterator(REALTIME_CHECKPOINT), + }, + onNewFinalityCheckpoint: { + subscribe: () => pubsub.asyncIterator(FINALITY_CHECKPOINT), + }, + onReorg: { + subscribe: () => pubsub.asyncIterator(SHALLOW_REORG), + }, + }, + }; +}; diff --git a/packages/core/src/indexing-server/schema.ts b/packages/core/src/indexing-server/schema.ts new file mode 100644 index 000000000..5cf63ed17 --- /dev/null +++ b/packages/core/src/indexing-server/schema.ts @@ -0,0 +1,154 @@ +export const indexingSchema = ` + scalar BigInt + + input Filter { + name: String! + chainId: Int! + address: [String!] + topics: [[String!]] + fromBlock: Int + toBlock: Int + includeEventSelectors: [String] + } + + input CursorInput { + timestamp: Int! + chainId: Int! + blockNumber: Int! + logIndex: Int! + } + + # src/types/log.ts + type Log { + id: String! + address: String! + blockHash: String! + blockNumber: BigInt! + data: String! + logIndex: Int! + removed: Boolean! + topics: [String!] + transactionHash: String! + transactionIndex: Int! + } + + # src/types/log.ts + type Block { + baseFeePerGas: BigInt + difficulty: BigInt! + extraData: String! + gasLimit: BigInt! + gasUsed: BigInt! + hash: String! + logsBloom: String! + miner: String! + mixHash: String! + nonce: String! + number: BigInt! + parentHash: String! + receiptsRoot: String! + sha3Uncles: String! + size: BigInt! + stateRoot: String! + timestamp: BigInt! + totalDifficulty: BigInt! 
+ transactionsRoot: String! + } + + type AccessListItem { + address: String! + storageKeys: [String!] + } + + # src/types/transaction.ts + type Transaction { + blockHash: String! + blockNumber: BigInt! + from: String! + gas: BigInt! + hash: String! + input: String! + nonce: Int! + r: String! + s: String! + to: String + transactionIndex: Int! + v: BigInt! + value: BigInt! + type: String! + gasPrice: BigInt + accessList: [AccessListItem] + maxFeePerGas: BigInt + maxPriorityFeePerGas: BigInt + } + + type Event { + logFilterName: String! + log: Log! + block: Block! + transaction: Transaction! + } + + type Count { + logFilterName: String! + selector: String! + count: Int! + } + + type Cursor { + timestamp: Int! + chainId: Int! + blockNumber: Int! + logIndex: Int! + } + + type Metadata { + pageEndsAtTimestamp: Int + counts: [Count!]! + cursor: Cursor + isLastPage: Boolean! + } + + type LogEventsResult { + events: [Event!] + metadata: Metadata + } + + type NetworkHistoricalSync { + checkpoint: Int! + isSyncComplete: Int! + } + + type Query { + getLogEvents( + fromTimestamp: Int!, + toTimestamp: Int!, + filters: [Filter!], + cursor: CursorInput + ): LogEventsResult! + getNetworkHistoricalSync( + chainId: Int! + ): NetworkHistoricalSync! + } + + type Checkpoint { + chainId: Int! + timestamp: Int! + } + + type SyncComplete { + chainId: Int! + } + + type Reorg { + commonAncestorTimestamp: Int! + } + + type Subscription { + onNewHistoricalCheckpoint: Checkpoint! + onHistoricalSyncComplete: SyncComplete! + onNewRealtimeCheckpoint: Checkpoint! + onNewFinalityCheckpoint: Checkpoint! + onReorg: Reorg! 
+ } +`; diff --git a/packages/core/src/indexing-server/service.ts b/packages/core/src/indexing-server/service.ts new file mode 100644 index 000000000..0b16e7380 --- /dev/null +++ b/packages/core/src/indexing-server/service.ts @@ -0,0 +1,156 @@ +import { makeExecutableSchema } from "@graphql-tools/schema"; +import express from "express"; +import { graphqlHTTP } from "express-graphql"; +import { PubSub } from "graphql-subscriptions"; +import { useServer } from "graphql-ws/lib/use/ws"; +import { Server as WebSocketServer } from "ws"; + +import { Network } from "@/config/networks"; +import { EventStore } from "@/event-store/store"; +import type { Common } from "@/Ponder"; +import { Server } from "@/utils/server"; + +import { + FINALITY_CHECKPOINT, + getResolvers, + HISTORICAL_CHECKPOINT, + NetworkCheckpoints, + REALTIME_CHECKPOINT, + SHALLOW_REORG, + SYNC_COMPLETE, +} from "./resolvers"; +import { indexingSchema } from "./schema"; + +export class IndexingServerService { + private common: Common; + private eventStore: EventStore; + private pubsub: PubSub; + private server: Server; + + // Per-network checkpoints. 
+ private networkCheckpoints: NetworkCheckpoints; + + app?: express.Express; + + isSyncComplete = false; + + constructor({ + common, + eventStore, + networks, + }: { + common: Common; + eventStore: EventStore; + networks: Network[]; + }) { + this.common = common; + this.eventStore = eventStore; + + this.server = new Server({ + common, + port: common.options.indexingPort, + }); + + // https://www.apollographql.com/docs/apollo-server/data/subscriptions#the-pubsub-class + this.pubsub = new PubSub(); + + this.networkCheckpoints = networks.reduce( + (acc: NetworkCheckpoints, network) => { + acc[network.chainId] = { + isHistoricalSyncComplete: false, + historicalCheckpoint: 0, + }; + + return acc; + }, + {} + ); + } + + get port() { + return this.server.port; + } + + async start() { + const server = await this.server.start(); + this.common.metrics.ponder_server_port.set(this.server.port); + + const schema = makeExecutableSchema({ + typeDefs: indexingSchema, + resolvers: getResolvers({ + eventStore: this.eventStore, + pubsub: this.pubsub, + networkCheckpoints: this.networkCheckpoints, + }), + }); + + const graphqlMiddleware = graphqlHTTP({ + schema, + graphiql: true, + }); + + this.server.app?.use("/graphql", graphqlMiddleware); + + // create and use the websocket server + const wsServer = new WebSocketServer({ + server, + path: "/graphql", + }); + + useServer({ schema }, wsServer); + } + + async kill() { + await this.server.kill(); + } + + setIsSyncComplete() { + this.isSyncComplete = true; + this.server.isHealthy = true; + + this.common.logger.info({ + service: "indexing-server", + msg: `Started responding as healthy`, + }); + } + + handleNewHistoricalCheckpoint(data: { chainId: number; timestamp: number }) { + this.pubsub.publish(HISTORICAL_CHECKPOINT, { + onNewHistoricalCheckpoint: data, + }); + + this.networkCheckpoints[data.chainId].historicalCheckpoint = data.timestamp; + } + + handleHistoricalSyncComplete(data: { chainId: number }) { + 
this.pubsub.publish(SYNC_COMPLETE, { + onHistoricalSyncComplete: data, + }); + + this.networkCheckpoints[data.chainId].isHistoricalSyncComplete = true; + + // If every network has completed the historical sync, call setIsSyncComplete. + const networkCheckpoints = Object.values(this.networkCheckpoints); + if (networkCheckpoints.every((n) => n.isHistoricalSyncComplete)) { + this.setIsSyncComplete(); + } + } + + handleNewRealtimeCheckpoint(data: { chainId: number; timestamp: number }) { + this.pubsub.publish(REALTIME_CHECKPOINT, { + onNewRealtimeCheckpoint: data, + }); + } + + handleNewFinalityCheckpoint(data: { chainId: number; timestamp: number }) { + this.pubsub.publish(FINALITY_CHECKPOINT, { + onNewFinalityCheckpoint: data, + }); + } + + handleReorg(data: { commonAncestorTimestamp: number }) { + this.pubsub.publish(SHALLOW_REORG, { + onReorg: data, + }); + } +} diff --git a/packages/core/src/server/service.ts b/packages/core/src/server/service.ts index 9532f56ec..212868ec9 100644 --- a/packages/core/src/server/service.ts +++ b/packages/core/src/server/service.ts @@ -1,36 +1,37 @@ -import cors from "cors"; -import express from "express"; import { graphqlHTTP } from "express-graphql"; import type { GraphQLSchema } from "graphql"; -import { createHttpTerminator } from "http-terminator"; -import { createServer, Server } from "node:http"; import type { Common } from "@/Ponder"; import type { UserStore } from "@/user-store/store"; +import { Server } from "@/utils/server"; import { startClock } from "@/utils/timer"; export class ServerService { private common: Common; private userStore: UserStore; - - private port: number; - app?: express.Express; - - private terminate?: () => Promise; + private server: Server; isHistoricalEventProcessingComplete = false; constructor({ common, userStore }: { common: Common; userStore: UserStore }) { this.common = common; this.userStore = userStore; - this.port = this.common.options.port; + + this.server = new Server({ + common, + port: 
common.options.port, + }); + } + + get app() { + return this.server.app; } async start() { - this.app = express(); - this.app.use(cors()); + await this.server.start(); + this.common.metrics.ponder_server_port.set(this.server.port); - this.app.use((req, res, next) => { + this.server.app?.use((req, res, next) => { const endClock = startClock(); res.on("finish", () => { const responseDuration = endClock(); @@ -64,79 +65,6 @@ export class ServerService { }); next(); }); - - const server = await new Promise((resolve, reject) => { - const server = createServer(this.app) - .on("error", (error) => { - if ((error as any).code === "EADDRINUSE") { - this.common.logger.warn({ - service: "server", - msg: `Port ${this.port} was in use, trying port ${this.port + 1}`, - }); - this.port += 1; - setTimeout(() => { - server.close(); - server.listen(this.port); - }, 5); - } else { - reject(error); - } - }) - .on("listening", () => { - this.common.metrics.ponder_server_port.set(this.port); - resolve(server); - }) - .listen(this.port); - }); - - const terminator = createHttpTerminator({ server }); - this.terminate = () => terminator.terminate(); - - this.common.logger.info({ - service: "server", - msg: `Started listening on port ${this.port}`, - }); - - this.app.post("/metrics", async (_, res) => { - try { - res.set("Content-Type", "text/plain; version=0.0.4; charset=utf-8"); - res.end(await this.common.metrics.getMetrics()); - } catch (error) { - res.status(500).end(error); - } - }); - - this.app.get("/metrics", async (_, res) => { - try { - res.set("Content-Type", "text/plain; version=0.0.4; charset=utf-8"); - res.end(await this.common.metrics.getMetrics()); - } catch (error) { - res.status(500).end(error); - } - }); - - // By default, the server will respond as unhealthy until historical events have - // been processed OR 4.5 minutes have passed since the app was created. This - // enables zero-downtime deployments on PaaS platforms like Railway and Render. 
- // Also see https://github.com/0xOlias/ponder/issues/24 - this.app.get("/health", (_, res) => { - if (this.isHistoricalEventProcessingComplete) { - return res.status(200).send(); - } - - const max = this.common.options.maxHealthcheckDuration; - const elapsed = Math.floor(process.uptime()); - - if (elapsed > max) { - this.common.logger.warn({ - service: "server", - msg: `Historical sync duration has exceeded the max healthcheck duration of ${max} seconds (current: ${elapsed}). Sevice is now responding as healthy and may serve incomplete data.`, - }); - return res.status(200).send(); - } - - return res.status(503).send(); - }); } reload({ graphqlSchema }: { graphqlSchema: GraphQLSchema }) { @@ -147,19 +75,16 @@ export class ServerService { graphiql: true, }); - this.app?.use("/", graphqlMiddleware); + this.server.app?.use("/", graphqlMiddleware); } async kill() { - await this.terminate?.(); - this.common.logger.debug({ - service: "server", - msg: `Stopped listening on port ${this.port}`, - }); + await this.server.kill(); } setIsHistoricalEventProcessingComplete() { this.isHistoricalEventProcessingComplete = true; + this.server.isHealthy = true; this.common.logger.info({ service: "server", diff --git a/packages/core/src/utils/graphql-client.ts b/packages/core/src/utils/graphql-client.ts new file mode 100644 index 000000000..d1bf907c6 --- /dev/null +++ b/packages/core/src/utils/graphql-client.ts @@ -0,0 +1,41 @@ +import { ApolloClient, HttpLink, InMemoryCache, split } from "@apollo/client"; +import { GraphQLWsLink } from "@apollo/client/link/subscriptions"; +import { getMainDefinition } from "@apollo/client/utilities"; +import { createClient } from "graphql-ws"; +import WebSocket from "ws"; + +/** + * Method to create a client for GQL queries and subscriptions + * https://www.apollographql.com/docs/react/data/subscriptions + */ +export const createGqlClient = (gqlEndpoint: string) => { + const httpLink = new HttpLink({ + uri: gqlEndpoint, + }); + + const wsLink = new 
GraphQLWsLink( + createClient({ + url: gqlEndpoint, + webSocketImpl: WebSocket, + }) + ); + + const splitLink = split( + ({ query }) => { + const definition = getMainDefinition(query); + return ( + definition.kind === "OperationDefinition" && + definition.operation === "subscription" + ); + }, + wsLink, + httpLink + ); + + const client = new ApolloClient({ + link: splitLink, + cache: new InMemoryCache(), + }); + + return client; +}; diff --git a/packages/core/src/utils/server.ts b/packages/core/src/utils/server.ts new file mode 100644 index 000000000..48487345b --- /dev/null +++ b/packages/core/src/utils/server.ts @@ -0,0 +1,110 @@ +import cors from "cors"; +import express from "express"; +import { createHttpTerminator } from "http-terminator"; +import { type Server as ServerInterface, createServer } from "node:http"; + +import type { Common } from "@/Ponder"; + +export class Server { + private common: Common; + + port: number; + app?: express.Express; + + private terminate?: () => Promise; + + constructor({ common, port }: { common: Common; port: number }) { + this.common = common; + this.port = port; + } + + isHealthy = false; + + async start() { + this.app = express(); + this.app.use(cors()); + + const server = await new Promise((resolve, reject) => { + const server = createServer(this.app) + .on("error", (error) => { + if ((error as any).code === "EADDRINUSE") { + this.common.logger.warn({ + service: "server", + msg: `Port ${this.port} was in use, trying port ${this.port + 1}`, + }); + this.port += 1; + setTimeout(() => { + server.close(); + server.listen(this.port); + }, 5); + } else { + reject(error); + } + }) + .on("listening", () => { + resolve(server); + }) + .listen(this.port); + }); + + const terminator = createHttpTerminator({ server }); + this.terminate = () => terminator.terminate(); + + this.common.logger.info({ + service: "server", + msg: `Started listening on port ${this.port}`, + }); + + this.app.post("/metrics", async (_, res) => { + try { + 
res.set("Content-Type", "text/plain; version=0.0.4; charset=utf-8"); + res.end(await this.common.metrics.getMetrics()); + } catch (error) { + res.status(500).end(error); + } + }); + + this.app.get("/metrics", async (_, res) => { + try { + res.set("Content-Type", "text/plain; version=0.0.4; charset=utf-8"); + res.end(await this.common.metrics.getMetrics()); + } catch (error) { + res.status(500).end(error); + } + }); + + // By default, the server will respond as unhealthy until historical events have + // been processed OR 4.5 minutes have passed since the app was created. This + // enables zero-downtime deployments on PaaS platforms like Railway and Render. + // Also see https://github.com/0xOlias/ponder/issues/24 + this.app.get("/health", (_, res) => { + if (this.isHealthy) { + return res.status(200).send(); + } + + const max = this.common.options.maxHealthcheckDuration; + const elapsed = Math.floor(process.uptime()); + + if (elapsed > max) { + this.common.logger.warn({ + service: "server", + msg: `Historical sync duration has exceeded the max healthcheck duration of ${max} seconds (current: ${elapsed}). 
Sevice is now responding as healthy and may serve incomplete data.`, + }); + return res.status(200).send(); + } + + return res.status(503).send(); + }); + + return server; + } + + async kill() { + await this.terminate?.(); + + this.common.logger.debug({ + service: "server", + msg: `Stopped listening on port ${this.port}`, + }); + } +} diff --git a/packages/core/tsconfig.base.json b/packages/core/tsconfig.base.json index 3fccb2508..ee4e29da2 100644 --- a/packages/core/tsconfig.base.json +++ b/packages/core/tsconfig.base.json @@ -28,7 +28,7 @@ "resolveJsonModule": true, // Language and environment - "moduleResolution": "NodeNext", + "moduleResolution": "Node", // By using Node, ECMAScript module packages (@apollo packages) get resolved properly "module": "ESNext", "target": "ESNext", "lib": [ diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 28fb5f7df..9202af6da 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -285,12 +285,21 @@ importers: packages/core: dependencies: + '@apollo/client': + specifier: ^3.8.3 + version: 3.8.3(graphql-ws@5.14.0)(graphql@15.8.0)(react@17.0.2) '@babel/code-frame': specifier: ^7.18.6 version: 7.18.6 + '@graphql-tools/schema': + specifier: ^10.0.0 + version: 10.0.0(graphql@15.8.0) '@jridgewell/trace-mapping': specifier: ^0.3.17 version: 0.3.17 + apollo-type-bigint: + specifier: ^0.1.3 + version: 0.1.3(graphql@15.8.0) async-mutex: specifier: ^0.4.0 version: 0.4.0 @@ -339,6 +348,12 @@ importers: graphql: specifier: ^15.3.0 version: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + graphql-subscriptions: + specifier: ^2.0.0 + version: 2.0.0(graphql@15.8.0) + graphql-ws: + specifier: ^5.14.0 + version: 5.14.0(graphql@15.8.0) http-terminator: specifier: ^3.2.0 version: 3.2.0 @@ -387,7 +402,13 @@ importers: viem: specifier: ^1.2.6 version: 1.2.6(typescript@5.1.3) + ws: + specifier: ^8.14.1 + version: 8.14.1 devDependencies: + '@graphql-tools/utils': + specifier: ^10.0.6 + version: 10.0.6(graphql@15.8.0) '@types/babel__code-frame': specifier: 
^7.0.3 version: 7.0.3 @@ -427,6 +448,9 @@ importers: '@types/supertest': specifier: ^2.0.12 version: 2.0.12 + '@types/ws': + specifier: ^8.5.5 + version: 8.5.5 '@viem/anvil': specifier: ^0.0.6 version: 0.0.6 @@ -543,6 +567,42 @@ packages: '@jridgewell/gen-mapping': 0.1.1 '@jridgewell/trace-mapping': 0.3.17 + /@apollo/client@3.8.3(graphql-ws@5.14.0)(graphql@15.8.0)(react@17.0.2): + resolution: {integrity: sha512-mK86JM6hCpMEBGDgdO9U8ZYS8r9lPjXE1tVGpJMdSFUsIcXpmEfHUAbbFpPtYmxn8Qa7XsYy0dwDaDhpf4UUPw==} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 + graphql-ws: ^5.5.5 + react: ^16.8.0 || ^17.0.0 || ^18.0.0 + react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 + subscriptions-transport-ws: ^0.9.0 || ^0.11.0 + peerDependenciesMeta: + graphql-ws: + optional: true + react: + optional: true + react-dom: + optional: true + subscriptions-transport-ws: + optional: true + dependencies: + '@graphql-typed-document-node/core': 3.2.0(graphql@15.8.0) + '@wry/context': 0.7.3 + '@wry/equality': 0.5.6 + '@wry/trie': 0.4.3 + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + graphql-tag: 2.12.6(graphql@15.8.0) + graphql-ws: 5.14.0(graphql@15.8.0) + hoist-non-react-statics: 3.3.2 + optimism: 0.17.5 + prop-types: 15.8.1 + react: 17.0.2 + response-iterator: 0.2.6 + symbol-observable: 4.0.0 + ts-invariant: 0.10.3 + tslib: 2.5.3 + zen-observable-ts: 1.2.5 + dev: false + /@babel/code-frame@7.18.6: resolution: {integrity: sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q==} engines: {node: '>=6.9.0'} @@ -2708,6 +2768,48 @@ packages: assemblyscript: 0.19.10 dev: true + /@graphql-tools/merge@9.0.0(graphql@15.8.0): + resolution: {integrity: sha512-J7/xqjkGTTwOJmaJQJ2C+VDBDOWJL3lKrHJN4yMaRLAJH3PosB7GiPRaSDZdErs0+F77sH2MKs2haMMkywzx7Q==} + engines: {node: '>=16.0.0'} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/utils': 10.0.6(graphql@15.8.0) + graphql: 
15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + tslib: 2.5.3 + dev: false + + /@graphql-tools/schema@10.0.0(graphql@15.8.0): + resolution: {integrity: sha512-kf3qOXMFcMs2f/S8Y3A8fm/2w+GaHAkfr3Gnhh2LOug/JgpY/ywgFVxO3jOeSpSEdoYcDKLcXVjMigNbY4AdQg==} + engines: {node: '>=16.0.0'} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-tools/merge': 9.0.0(graphql@15.8.0) + '@graphql-tools/utils': 10.0.6(graphql@15.8.0) + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + tslib: 2.5.3 + value-or-promise: 1.0.12 + dev: false + + /@graphql-tools/utils@10.0.6(graphql@15.8.0): + resolution: {integrity: sha512-hZMjl/BbX10iagovakgf3IiqArx8TPsotq5pwBld37uIX1JiZoSbgbCIFol7u55bh32o6cfDEiiJgfAD5fbeyQ==} + engines: {node: '>=16.0.0'} + peerDependencies: + graphql: ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + '@graphql-typed-document-node/core': 3.2.0(graphql@15.8.0) + dset: 3.1.2 + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + tslib: 2.5.3 + + /@graphql-typed-document-node/core@3.2.0(graphql@15.8.0): + resolution: {integrity: sha512-mB9oAsNCm9aM3/SOv4YtBMqZbYj10R7dkq8byBqxGY/ncFwhf2oQzMV+LCRlWoDSEBJ3COiR1yeDvMtsoOsuFQ==} + peerDependencies: + graphql: ^0.8.0 || ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 || ^17.0.0 + dependencies: + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + /@headlessui/react@1.7.15(react-dom@18.2.0)(react@18.2.0): resolution: {integrity: sha512-OTO0XtoRQ6JPB1cKNFYBZv2Q0JMqMGNhYP1CjPvcJvjz8YGokz8oAj89HIYZGN0gZzn/4kk9iUpmMF4Q21Gsqw==} engines: {node: '>=10'} @@ -3869,6 +3971,12 @@ packages: '@types/node': 18.16.18 dev: true + /@types/ws@8.5.5: + resolution: {integrity: sha512-lwhs8hktwxSjf9UaZ9tG5M03PGogvFaH8gUgLNbN9HKIg0dvv6q+gkSuJ8HN4/VbyxkuLzCjlN7GquQ0gUJfIg==} + dependencies: + '@types/node': 18.16.18 + dev: true + /@types/yoga-layout@1.9.2: resolution: {integrity: 
sha512-S9q47ByT2pPvD65IvrWp7qppVMpk9WGMbVq9wbWZOHg6tnXSD4vyhao6nOSBwwfDdV2p3Kx9evA9vI+XWTfDvw==} dev: false @@ -4065,7 +4173,7 @@ packages: execa: 7.1.1 get-port: 6.1.2 http-proxy: 1.18.1 - ws: 8.13.0 + ws: 8.14.1 transitivePeerDependencies: - bufferutil - debug @@ -4149,6 +4257,27 @@ packages: tslib: 2.5.3 dev: true + /@wry/context@0.7.3: + resolution: {integrity: sha512-Nl8WTesHp89RF803Se9X3IiHjdmLBrIvPMaJkl+rKVJAYyPsz1TEUbu89943HpvujtSJgDUx9W4vZw3K1Mr3sA==} + engines: {node: '>=8'} + dependencies: + tslib: 2.5.3 + dev: false + + /@wry/equality@0.5.6: + resolution: {integrity: sha512-D46sfMTngaYlrH+OspKf8mIJETntFnf6Hsjb0V41jAXJ7Bx2kB8Rv8RCUujuVWYttFtHkUNp7g+FwxNQAr6mXA==} + engines: {node: '>=8'} + dependencies: + tslib: 2.5.3 + dev: false + + /@wry/trie@0.4.3: + resolution: {integrity: sha512-I6bHwH0fSf6RqQcnnXLJKhkSXG45MFral3GxPaY4uAl0LYDZM+YDVDAiU9bYwjTuysy1S0IeecWtmq1SZA3M1w==} + engines: {node: '>=8'} + dependencies: + tslib: 2.5.3 + dev: false + /JSONStream@1.3.2: resolution: {integrity: sha512-mn0KSip7N4e0UDPZHnqDsHECo5uGQrixQKnAskOM1BIB8hd7QKbd6il8IPRPudPHOeHiECoCFqhyMaRO9+nWyA==} hasBin: true @@ -4375,6 +4504,14 @@ packages: - debug dev: true + /apollo-type-bigint@0.1.3(graphql@15.8.0): + resolution: {integrity: sha512-nyfwEWRZ+kon3Nnot20DufGm2EHZrkJoryYzw3soD+USdxhkcW434w1c/n+mjMLQDl86Z6EvlkvMX5Lordf2Wg==} + peerDependencies: + graphql: '>=0.13.0' + dependencies: + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + dev: false + /app-module-path@2.2.0: resolution: {integrity: sha512-gkco+qxENJV+8vFcDiiFhuoSvRXb2a/QPqpSoWhVz829VNJfOTnELbBmPmNKFxf3xdNnw4DWCkzkDaavcX/1YQ==} dev: true @@ -6096,7 +6233,6 @@ packages: /dset@3.1.2: resolution: {integrity: sha512-g/M9sqy3oHe477Ar4voQxWtaPIFw1jTdKZuomOjhCcBx9nHUNn0pu6NopuFFrTh/TRZIKEj+76vLWFu9BNKk+Q==} engines: {node: '>=4'} - dev: false /eastasianwidth@0.2.0: resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} @@ -7941,6 +8077,34 @@ 
packages: graphql: 16.6.0 dev: true + /graphql-subscriptions@2.0.0(graphql@15.8.0): + resolution: {integrity: sha512-s6k2b8mmt9gF9pEfkxsaO1lTxaySfKoEJzEfmwguBbQ//Oq23hIXCfR1hm4kdh5hnR20RdwB+s3BCb+0duHSZA==} + peerDependencies: + graphql: ^15.7.2 || ^16.0.0 + dependencies: + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + iterall: 1.3.0 + dev: false + + /graphql-tag@2.12.6(graphql@15.8.0): + resolution: {integrity: sha512-FdSNcu2QQcWnM2VNvSCCDCVS5PpPqpzgFT8+GXzqJuoDd0CBncxCY278u4mhRO7tMgo2JjgJA5aZ+nWSQ/Z+xg==} + engines: {node: '>=10'} + peerDependencies: + graphql: ^0.9.0 || ^0.10.0 || ^0.11.0 || ^0.12.0 || ^0.13.0 || ^14.0.0 || ^15.0.0 || ^16.0.0 + dependencies: + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + tslib: 2.5.3 + dev: false + + /graphql-ws@5.14.0(graphql@15.8.0): + resolution: {integrity: sha512-itrUTQZP/TgswR4GSSYuwWUzrE/w5GhbwM2GX3ic2U7aw33jgEsayfIlvaj7/GcIvZgNMzsPTrE5hqPuFUiE5g==} + engines: {node: '>=10'} + peerDependencies: + graphql: '>=0.11 <=16' + dependencies: + graphql: 15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty) + dev: false + /graphql@15.5.0: resolution: {integrity: sha512-OmaM7y0kaK31NKG31q4YbD2beNYa6jBBKtMFT6gLYJljHLJr42IqJ8KX08u3Li/0ifzTU5HjmoOOrwa5BRLeDA==} engines: {node: '>= 10.x'} @@ -7949,7 +8113,6 @@ packages: /graphql@15.8.0(patch_hash=qb2u6q36ugvr6swjr2i3rg76ty): resolution: {integrity: sha512-5gghUc24tP9HRznNpV2+FIoq3xKkj5dTQqf4v0CpdPbFVwFkWoxOM+o+2OC9ZSvjEMTjfmG9QT+gcvggTwW1zw==} engines: {node: '>= 10.x'} - dev: false patched: true /graphql@16.6.0: @@ -8167,6 +8330,12 @@ packages: minimalistic-crypto-utils: 1.0.1 dev: true + /hoist-non-react-statics@3.3.2: + resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} + dependencies: + react-is: 16.13.1 + dev: false + /hosted-git-info@2.8.9: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} dev: true @@ -8872,6 +9041,10 @@ 
packages: readable-stream: 3.6.2 dev: true + /iterall@1.3.0: + resolution: {integrity: sha512-QZ9qOMdF+QLHxy1QIpUHUU1D5pS2CG2P69LF6L6CPjPYA/XMOmKV3PZpawHoAjHNyB0swdVTRxdYT4tbBbxqwg==} + dev: false + /jackspeak@2.2.1: resolution: {integrity: sha512-MXbxovZ/Pm42f6cDIDkl3xpwv1AGwObKwfmjs2nQePiy85tP3fatofl3FC1aBsOtP/6fq5SbtgHwWcMsLP+bDw==} engines: {node: '>=14'} @@ -10817,6 +10990,14 @@ packages: is-wsl: 2.2.0 dev: true + /optimism@0.17.5: + resolution: {integrity: sha512-TEcp8ZwK1RczmvMnvktxHSF2tKgMWjJ71xEFGX5ApLh67VsMSTy1ZUlipJw8W+KaqgOmQ+4pqwkeivY89j+4Vw==} + dependencies: + '@wry/context': 0.7.3 + '@wry/trie': 0.4.3 + tslib: 2.5.3 + dev: false + /optionator@0.9.1: resolution: {integrity: sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==} engines: {node: '>= 0.8.0'} @@ -11426,7 +11607,6 @@ packages: loose-envify: 1.4.0 object-assign: 4.1.1 react-is: 16.13.1 - dev: true /property-information@6.2.0: resolution: {integrity: sha512-kma4U7AFCTwpqq5twzC1YVIDXSqg6qQK6JN0smOw8fgRy1OkMi0CYSzFmsy6dnqSenamAtj0CyXMUJ1Mf6oROg==} @@ -11598,7 +11778,6 @@ packages: /react-is@16.13.1: resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} - dev: true /react-is@17.0.2: resolution: {integrity: sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==} @@ -11966,6 +12145,11 @@ packages: supports-preserve-symlinks-flag: 1.0.0 dev: true + /response-iterator@0.2.6: + resolution: {integrity: sha512-pVzEEzrsg23Sh053rmDUvLSkGXluZio0qu8VT6ukrYuvtjVfCbDZH9d6PGXb8HZfzdNZt8feXv/jvUzlhRgLnw==} + engines: {node: '>=0.8'} + dev: false + /restore-cursor@3.1.0: resolution: {integrity: sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==} engines: {node: '>=8'} @@ -12826,6 +13010,11 @@ packages: stable: 0.1.8 dev: true + /symbol-observable@4.0.0: + resolution: {integrity: 
sha512-b19dMThMV4HVFynSAM1++gBHAbk2Tc/osgLIBZMKsyqh34jb2e8Os7T6ZW/Bt3pJFdBTd2JwAnAAEQV7rSNvcQ==} + engines: {node: '>=0.10'} + dev: false + /sync-request@6.1.0: resolution: {integrity: sha512-8fjNkrNlNCrVc/av+Jn+xxqfCjYaBoHqCsDz6mt030UMxJGr+GSfCV1dQt2gRtlL63+VPidwDVLr7V2OcTSdRw==} engines: {node: '>=8.0.0'} @@ -13102,6 +13291,13 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true + /ts-invariant@0.10.3: + resolution: {integrity: sha512-uivwYcQaxAucv1CzRp2n/QdYPo4ILf9VXgH19zEIjFx2EJufV16P0JtJVpYHy89DItG6Kwj2oIUjrcK5au+4tQ==} + engines: {node: '>=8'} + dependencies: + tslib: 2.5.3 + dev: false + /ts-node@10.9.1(@types/node@18.16.18)(typescript@5.1.3): resolution: {integrity: sha512-NtVysVPkxxrwFGUUxGYhfux8k78pQB3JqYBXlLRZgdGUqTO5wU/UyHop5p70iEbGhB7q5KmiZiU0Y3KlJrScEw==} hasBin: true @@ -13587,6 +13783,11 @@ packages: spdx-expression-parse: 3.0.1 dev: true + /value-or-promise@1.0.12: + resolution: {integrity: sha512-Z6Uz+TYwEqE7ZN50gwn+1LCVo9ZVrpxRPOhOLnncYkY1ZzOYtrX8Fwf/rFktZ8R5mJms6EZf5TqNOMeZmnPq9Q==} + engines: {node: '>=12'} + dev: false + /varint@6.0.0: resolution: {integrity: sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg==} dev: true @@ -14043,8 +14244,8 @@ packages: utf-8-validate: optional: true - /ws@8.13.0: - resolution: {integrity: sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA==} + /ws@8.14.1: + resolution: {integrity: sha512-4OOseMUq8AzRBI/7SLMUwO+FEDnguetSk7KMb1sHwvF2w2Wv5Hoj0nlifx8vtGsftE/jWHojPy8sMMzYLJ2G/A==} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 @@ -14054,7 +14255,6 @@ packages: optional: true utf-8-validate: optional: true - dev: true /xtend@4.0.2: resolution: {integrity: sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==} @@ -14157,6 +14357,16 @@ packages: '@types/yoga-layout': 1.9.2 dev: 
false + /zen-observable-ts@1.2.5: + resolution: {integrity: sha512-QZWQekv6iB72Naeake9hS1KxHlotfRpe+WGNbNx5/ta+R3DNjVO2bswf63gXlWDcs+EMd7XY8HfVQyP1X6T4Zg==} + dependencies: + zen-observable: 0.8.15 + dev: false + + /zen-observable@0.8.15: + resolution: {integrity: sha512-PQ2PC7R9rslx84ndNBZB/Dkv8V8fZEpk83RLgXtYd0fwUgEjseMn1Dgajh2x6S8QbZAFa9p2qVCEuYZNgve0dQ==} + dev: false + /zod@3.21.4: resolution: {integrity: sha512-m46AKbrzKVzOzs/DZgVnG5H55N1sv1M8qZU3A8RIKbs3mrACDNeIOeilDymVb2HdmP8uwshOCF4uJ8uM9rCqJw==} dev: false