From 4ada070478e1d96b6b2b62333c575182b2b4b6e2 Mon Sep 17 00:00:00 2001
From: kevin
Date: Thu, 4 Apr 2024 18:09:35 -0400
Subject: [PATCH 01/39] fix: relocated runLogsSql into setupPartitionedLogsTable

---
 runner/src/provisioner/provisioner.ts | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/runner/src/provisioner/provisioner.ts b/runner/src/provisioner/provisioner.ts
index 89e466809..bb0aebf68 100644
--- a/runner/src/provisioner/provisioner.ts
+++ b/runner/src/provisioner/provisioner.ts
@@ -137,7 +137,7 @@ export default class Provisioner {
   async setupPartitionedLogsTable (userName: string, databaseName: string, schemaName: string): Promise<void> {
     await wrapError(
       async () => {
-        // TODO: Create logs table
+        // await this.runLogsSql(databaseName, schemaName);
         await this.grantCronAccess(userName);
         await this.scheduleLogPartitionJobs(userName, databaseName, schemaName);
       },
@@ -244,7 +244,6 @@ export default class Provisioner {

       await this.createSchema(databaseName, schemaName);

-      // await this.runLogsSql(databaseName, schemaName);
       await this.runIndexerSql(databaseName, schemaName, databaseSchema);

       // TODO re-enable once logs table is created

From 8b57a79526561f0571619ed898c209c8a6b11022 Mon Sep 17 00:00:00 2001
From: kevin
Date: Thu, 4 Apr 2024 18:13:45 -0400
Subject: [PATCH 02/39] chore: add prefix for indexes in schema

---
 runner/src/provisioner/schemas/logs-table.ts | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/runner/src/provisioner/schemas/logs-table.ts b/runner/src/provisioner/schemas/logs-table.ts
index 1d63d781e..d045f327a 100644
--- a/runner/src/provisioner/schemas/logs-table.ts
+++ b/runner/src/provisioner/schemas/logs-table.ts
@@ -10,11 +10,11 @@ CREATE TABLE __logs (
   PRIMARY KEY (date, id)
 ) PARTITION BY RANGE (date);

-CREATE INDEX logs_timestamp_idx ON __logs USING btree (timestamp);
-CREATE INDEX logs_type_idx ON __logs USING btree (type);
-CREATE INDEX logs_level_idx ON __logs USING btree (level);
-CREATE INDEX logs_block_height_idx ON __logs USING btree (block_height);
-CREATE INDEX logs_search_vector_idx ON __logs USING GIN (to_tsvector('english', message));
+CREATE INDEX __logs_timestamp_idx ON __logs USING btree (timestamp);
+CREATE INDEX __logs_type_idx ON __logs USING btree (type);
+CREATE INDEX __logs_level_idx ON __logs USING btree (level);
+CREATE INDEX __logs_block_height_idx ON __logs USING btree (block_height);
+CREATE INDEX __logs_search_vector_idx ON __logs USING GIN (to_tsvector('english', message));


 CREATE OR REPLACE FUNCTION fn_create_partition(_tbl text, _date date, _interval_start text, _interval_end text)

From a709dae32eec2574e3be45f1dff48154c3dee9b3 Mon Sep 17 00:00:00 2001
From: kevin
Date: Fri, 5 Apr 2024 14:32:30 -0400
Subject: [PATCH 03/39] feat: added log-entry class and test

---
 runner/src/log-entry/log-entry.test.ts | 27 ++++++++++++++++++++++++++
 runner/src/log-entry/log-entry.ts      | 22 +++++++++++++++++++++
 2 files changed, 49 insertions(+)
 create mode 100644 runner/src/log-entry/log-entry.test.ts
 create mode 100644 runner/src/log-entry/log-entry.ts

diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/log-entry/log-entry.test.ts
new file mode 100644
index 000000000..fe255a36d
--- /dev/null
+++ b/runner/src/log-entry/log-entry.test.ts
@@ -0,0 +1,27 @@
+import LogEntry from './log-entry';
+import { LogType, LogLevel } from '../indexer-logger/indexer-logger';
+
+describe('LogEntry', () => {
+  test('should create a LogEntry instance with current timestamp', () => {
+    const currentTime = new Date();
+    const logEntry = new LogEntry('Test message', LogLevel.INFO, LogType.SYSTEM);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    const timestampDifference = Math.abs(currentTime.getTime() - logEntry.timestamp.getTime());
+    expect(timestampDifference).toBeLessThanOrEqual(1000);
+  });
+
+  test('should create a LogEntry instance with block height', () => {
+    const logEntry = new LogEntry('Test message', LogLevel.INFO, LogType.SYSTEM, 12345);
+    expect(logEntry.blockHeight).toBe(12345);
+  });
+
+  test('systemInfo static method should create a LogEntry instance with predefined parameters', () => {
+    const systemLogEntry = LogEntry.systemInfo('System info message', 67890);
+    expect(systemLogEntry.type).toBe(LogType.SYSTEM);
+  });
+
+  test('userInfo static method should create a LogEntry instance with predefined parameters', () => {
+    const systemLogEntry = LogEntry.userLogs('successful run of indexer', LogLevel.INFO, 67890);
+    expect(systemLogEntry.type).toBe(LogType.USER);
+  });
+});
diff --git a/runner/src/log-entry/log-entry.ts b/runner/src/log-entry/log-entry.ts
new file mode 100644
index 000000000..01d9769b1
--- /dev/null
+++ b/runner/src/log-entry/log-entry.ts
@@ -0,0 +1,22 @@
+import { LogType, LogLevel } from '../indexer-logger/indexer-logger';
+
+export default class LogEntry {
+  public readonly timestamp: Date;
+
+  constructor (
+    public readonly message: string,
+    public readonly level: LogLevel,
+    public readonly type: LogType,
+    public readonly blockHeight?: number
+  ) {
+    this.timestamp = new Date();
+  }
+
+  static systemInfo (message: string, blockHeight?: number): LogEntry {
+    return new LogEntry(message, LogLevel.INFO, LogType.SYSTEM, blockHeight);
+  }
+
+  static userLogs (message: string, level: LogLevel, blockHeight?: number): LogEntry {
+    return new LogEntry(message, level, LogType.USER, blockHeight);
+  }
+}

From f4c7ccb7a1a1c05a22f3e0cc8858ead542b8741c Mon Sep 17 00:00:00 2001
From: Kevin Zhang <42101107+Kevin101Zhang@users.noreply.github.com>
Date: Fri, 5 Apr 2024 14:49:19 -0400
Subject: [PATCH 04/39] Update log-entry.test.ts

removed useless test
---
 runner/src/log-entry/log-entry.test.ts | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/log-entry/log-entry.test.ts
index fe255a36d..434cfb641 100644
--- a/runner/src/log-entry/log-entry.test.ts
+++ b/runner/src/log-entry/log-entry.test.ts
@@ -10,11 +10,6 @@ describe('LogEntry', () => {
     expect(timestampDifference).toBeLessThanOrEqual(1000);
   });

-  test('should create a LogEntry instance with block height', () => {
-    const logEntry = new LogEntry('Test message', LogLevel.INFO, LogType.SYSTEM, 12345);
-    expect(logEntry.blockHeight).toBe(12345);
-  });
-
   test('systemInfo static method should create a LogEntry instance with predefined parameters', () => {
     const systemLogEntry = LogEntry.systemInfo('System info message', 67890);
     expect(systemLogEntry.type).toBe(LogType.SYSTEM);

From 9de783fe673489dbde2f1f9a94f225da10ddd049 Mon Sep 17 00:00:00 2001
From: Kevin Zhang <42101107+Kevin101Zhang@users.noreply.github.com>
Date: Fri, 5 Apr 2024 14:56:16 -0400
Subject: [PATCH 05/39] Update log-entry.test.ts

---
 runner/src/log-entry/log-entry.test.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/log-entry/log-entry.test.ts
index 434cfb641..e694d5e90 100644
--- a/runner/src/log-entry/log-entry.test.ts
+++ b/runner/src/log-entry/log-entry.test.ts
@@ -16,7 +16,7 @@ describe('LogEntry', () => {
   });

   test('userInfo static method should create a LogEntry instance with predefined parameters', () => {
-    const systemLogEntry = LogEntry.userLogs('successful run of indexer', LogLevel.INFO, 67890);
-    expect(systemLogEntry.type).toBe(LogType.USER);
+    const userLogEntry = LogEntry.userLogs('successful run of indexer', LogLevel.INFO, 67890);
+    expect(userLogEntry.type).toBe(LogType.USER);
   });
 });

From 89fa86505290c21b7c0da723c47f70f3848b3af7 Mon Sep 17 00:00:00 2001
From: kevin
Date: Mon, 8 Apr 2024 12:19:35 -0400
Subject: [PATCH 06/39] fix: re-enabled commented-out provisioning

---
 runner/src/provisioner/provisioner.test.ts | 38 ++++++++++++++--------
 runner/src/provisioner/provisioner.ts      | 18 +++++-----
 2 files changed, 32 insertions(+), 24 deletions(-)

diff --git a/runner/src/provisioner/provisioner.test.ts b/runner/src/provisioner/provisioner.test.ts
index 86cd50a47..27131b61e 100644
--- a/runner/src/provisioner/provisioner.test.ts
+++ b/runner/src/provisioner/provisioner.test.ts
@@ -15,6 +15,7 @@ describe('Provisioner', () => {
   const functionName = 'test-function';
   const sanitizedFunctionName = 'test_function';
   const databaseSchema = 'CREATE TABLE blocks (height numeric)';
+  const logsDDL = expect.any(String)
   const error = new Error('some error');
   const schemaName = `${sanitizedAccountId}_${sanitizedFunctionName}`;

@@ -38,6 +39,7 @@ describe('Provisioner', () => {
       addDatasource: jest.fn().mockReturnValueOnce(null),
       executeSqlOnSchema: jest.fn().mockReturnValueOnce(null),
       createSchema: jest.fn().mockReturnValueOnce(null),
+      setupPartitionedLogsTable: jest.fn().mockReturnValueOnce(null),
       doesSourceExist: jest.fn().mockReturnValueOnce(false),
       doesSchemaExist: jest.fn().mockReturnValueOnce(false),
       untrackTables: jest.fn().mockReturnValueOnce(null),
@@ -99,18 +101,19 @@ describe('Provisioner', () => {
         ['GRANT ALL PRIVILEGES ON DATABASE morgs_near TO morgs_near'],
         ['REVOKE CONNECT ON DATABASE morgs_near FROM PUBLIC'],
       ]);
-      // TODO re-enable once logs table is created
-      // expect(cronPgClient.query.mock.calls).toEqual([
-      //   ['GRANT USAGE ON SCHEMA cron TO morgs_near'],
-      //   ['GRANT EXECUTE ON FUNCTION cron.schedule_in_database TO morgs_near;'],
-      // ]);
-      // expect(userPgClientQuery.mock.calls).toEqual([
-      //   ["SELECT cron.schedule_in_database('morgs_near_test_function_logs_create_partition', '0 1 * * *', $$SELECT fn_create_partition('morgs_near_test_function.__logs', CURRENT_DATE, '1 day', '2 day')$$, 'morgs_near');"],
-      //   ["SELECT cron.schedule_in_database('morgs_near_test_function_logs_delete_partition', '0 2 * * *', $$SELECT fn_delete_partition('morgs_near_test_function.__logs', CURRENT_DATE, '-15 day', '-14 day')$$, 'morgs_near');"]
-      // ]);
+
+      expect(cronPgClient.query.mock.calls).toEqual([
+        ['GRANT USAGE ON SCHEMA cron TO morgs_near'],
+        ['GRANT EXECUTE ON FUNCTION cron.schedule_in_database TO morgs_near;'],
+      ]);
+      expect(userPgClientQuery.mock.calls).toEqual([
+        ["SELECT cron.schedule_in_database('morgs_near_test_function_logs_create_partition', '0 1 * * *', $$SELECT fn_create_partition('morgs_near_test_function.__logs', CURRENT_DATE, '1 day', '2 day')$$, 'morgs_near');"],
+        ["SELECT cron.schedule_in_database('morgs_near_test_function_logs_delete_partition', '0 2 * * *', $$SELECT fn_delete_partition('morgs_near_test_function.__logs', CURRENT_DATE, '-15 day', '-14 day')$$, 'morgs_near');"]
+      ]);
       expect(hasuraClient.addDatasource).toBeCalledWith(sanitizedAccountId, password, sanitizedAccountId);
       expect(hasuraClient.createSchema).toBeCalledWith(sanitizedAccountId, schemaName);
-      expect(hasuraClient.executeSqlOnSchema).toBeCalledWith(sanitizedAccountId, schemaName, databaseSchema);
+      expect(hasuraClient.executeSqlOnSchema).toHaveBeenNthCalledWith(1, sanitizedAccountId, schemaName, databaseSchema);
+      expect(hasuraClient.executeSqlOnSchema).toHaveBeenNthCalledWith(2, sanitizedAccountId, schemaName, logsDDL);
       expect(hasuraClient.getTableNames).toBeCalledWith(schemaName, sanitizedAccountId);
       expect(hasuraClient.trackTables).toBeCalledWith(schemaName, tableNames, sanitizedAccountId);
       expect(hasuraClient.addPermissionsToTables).toBeCalledWith(
@@ -172,11 +175,18 @@ describe('Provisioner', () => {
       await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to add datasource: some error');
     });

-    it.skip('throws an error when it fails to run sql', async () => {
+    it('throws an error when it fails to run sql to create indexer sql', async () => {
       hasuraClient.executeSqlOnSchema = jest.fn().mockRejectedValue(error);
+
+      await expect(provisioner.runIndexerSql(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to run user script: some error');
+    });

-      await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to run logs script: some error');
+    it('throws an error when it fails to run sql to create logs sql', async () => {
+      hasuraClient.executeSqlOnSchema = jest.fn().mockRejectedValue(error);
+
+      await expect(provisioner.runLogsSql(accountId, functionName)).rejects.toThrow('Failed to run logs script: some error');
     });
+
     it('throws an error when it fails to fetch table names', async () => {
       hasuraClient.getTableNames = jest.fn().mockRejectedValue(error);
@@ -203,13 +213,13 @@ describe('Provisioner', () => {
     });

     // TODO re-enable once logs table is created
-    it.skip('throws when grant cron access fails', async () => {
+    it('throws when grant cron access fails', async () => {
       cronPgClient.query = jest.fn().mockRejectedValue(error);

       await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to setup partitioned logs table: Failed to grant cron access: some error');
     });

-    it.skip('throws when scheduling cron jobs fails', async () => {
+    it('throws when scheduling cron jobs fails', async () => {
       userPgClientQuery = jest.fn().mockRejectedValueOnce(error);

       await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to setup partitioned logs table: Failed to schedule log partition jobs: some error');
diff --git a/runner/src/provisioner/provisioner.ts b/runner/src/provisioner/provisioner.ts
index bb0aebf68..c1626af50 100644
--- a/runner/src/provisioner/provisioner.ts
+++ b/runner/src/provisioner/provisioner.ts
@@ -4,7 +4,7 @@ import pgFormatLib from 'pg-format';
 import { wrapError } from '../utility';
 import cryptoModule from 'crypto';
 import HasuraClient from '../hasura-client';
-// import { logsTableDDL } from './schemas/logs-table';
+import { logsTableDDL } from './schemas/logs-table';
 import PgClientClass from '../pg-client';

 const DEFAULT_PASSWORD_LENGTH = 16;
@@ -114,7 +114,7 @@ export default class Provisioner {
       host: this.config.hasuraHostOverride ?? userDbConnectionParameters.host,
       port: this.config.hasuraPortOverride ?? userDbConnectionParameters.port,
     });
-
+    console.log(userCronPgClient)
     await userCronPgClient.query(
       this.pgFormat(
         "SELECT cron.schedule_in_database('%1$I_logs_create_partition', '0 1 * * *', $$SELECT fn_create_partition('%1$I.__logs', CURRENT_DATE, '1 day', '2 day')$$, %2$L);",
@@ -137,7 +137,7 @@ export default class Provisioner {
   async setupPartitionedLogsTable (userName: string, databaseName: string, schemaName: string): Promise<void> {
     await wrapError(
       async () => {
-        // await this.runLogsSql(databaseName, schemaName);
+        await this.runLogsSql(databaseName, schemaName);
         await this.grantCronAccess(userName);
         await this.scheduleLogPartitionJobs(userName, databaseName, schemaName);
       },
@@ -185,10 +185,10 @@ export default class Provisioner {
     return await wrapError(async () => await this.hasuraClient.createSchema(databaseName, schemaName), 'Failed to create schema');
   }

-  // async runLogsSql (databaseName: string, schemaName: string): Promise<void> {
-  //   const logsDDL = logsTableDDL(schemaName);
-  //   return await wrapError(async () => await this.hasuraClient.executeSqlOnSchema(databaseName, schemaName, logsDDL), 'Failed to run logs script');
-  // }
+  async runLogsSql (databaseName: string, schemaName: string): Promise<void> {
+    const logsDDL = logsTableDDL(schemaName);
+    return await wrapError(async () => await this.hasuraClient.executeSqlOnSchema(databaseName, schemaName, logsDDL), 'Failed to run logs script');
+  }

   async runIndexerSql (databaseName: string, schemaName: string, sqlScript: any): Promise<void> {
     return await wrapError(async () => await this.hasuraClient.executeSqlOnSchema(databaseName, schemaName, sqlScript), 'Failed to run user script');
@@ -245,9 +245,7 @@ export default class Provisioner {
       await this.createSchema(databaseName, schemaName);

       await this.runIndexerSql(databaseName, schemaName, databaseSchema);
-
-      // TODO re-enable once logs table is created
-      // await this.setupPartitionedLogsTable(userName, databaseName, schemaName);
+      await this.setupPartitionedLogsTable(userName, databaseName, schemaName);

       const updatedTableNames = await this.getTableNames(schemaName, databaseName);

From 8d0da62b0adcb0a6b3f70cd4fc4e3447d9c3cf1c Mon Sep 17 00:00:00 2001
From: kevin
Date: Mon, 8 Apr 2024 13:05:08 -0400
Subject: [PATCH 07/39] feat: added additional static methods to LogEntry

---
 runner/src/log-entry/log-entry.test.ts | 74 ++++++++++++++++++++++----
 runner/src/log-entry/log-entry.ts      | 34 ++++++++++--
 2 files changed, 94 insertions(+), 14 deletions(-)

diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/log-entry/log-entry.test.ts
index e694d5e90..1a3c31f84 100644
--- a/runner/src/log-entry/log-entry.test.ts
+++ b/runner/src/log-entry/log-entry.test.ts
@@ -2,21 +2,73 @@ import LogEntry from './log-entry';
 import { LogType, LogLevel } from '../indexer-logger/indexer-logger';

 describe('LogEntry', () => {
-  test('should create a LogEntry instance with current timestamp', () => {
-    const currentTime = new Date();
-    const logEntry = new LogEntry('Test message', LogLevel.INFO, LogType.SYSTEM);
+  test('create a system debug log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.systemDebug('Debug message', blockHeight);
+    expect(logEntry.message).toBe('Debug message');
+    expect(logEntry.level).toBe(LogLevel.DEBUG);
+    expect(logEntry.type).toBe(LogType.SYSTEM);
     expect(logEntry.timestamp).toBeInstanceOf(Date);
-    const timestampDifference = Math.abs(currentTime.getTime() - logEntry.timestamp.getTime());
-    expect(timestampDifference).toBeLessThanOrEqual(1000);
+    expect(logEntry.blockHeight).toBe(blockHeight);
   });

-  test('systemInfo static method should create a LogEntry instance with predefined parameters', () => {
-    const systemLogEntry = LogEntry.systemInfo('System info message', 67890);
-    expect(systemLogEntry.type).toBe(LogType.SYSTEM);
+  test('create a system info log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.systemInfo('Info message', blockHeight);
+    expect(logEntry.message).toBe('Info message');
+    expect(logEntry.level).toBe(LogLevel.INFO);
+    expect(logEntry.type).toBe(LogType.SYSTEM);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBe(blockHeight);
+  });
+
+  test('create a system warn log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.systemWarn('Warn message', blockHeight);
+    expect(logEntry.message).toBe('Warn message');
+    expect(logEntry.level).toBe(LogLevel.WARN);
+    expect(logEntry.type).toBe(LogType.SYSTEM);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBe(blockHeight);
+  });
+
+  test('create a system error log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.systemError('Error message', blockHeight);
+    expect(logEntry.message).toBe('Error message');
+    expect(logEntry.level).toBe(LogLevel.ERROR);
+    expect(logEntry.type).toBe(LogType.SYSTEM);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBe(blockHeight);
   });

-  test('userInfo static method should create a LogEntry instance with predefined parameters', () => {
-    const userLogEntry = LogEntry.userLogs('successful run of indexer', LogLevel.INFO, 67890);
-    expect(userLogEntry.type).toBe(LogType.USER);
+  test('create a user info log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.userInfo('User info message', blockHeight);
+    expect(logEntry.message).toBe('User info message');
+    expect(logEntry.level).toBe(LogLevel.INFO);
+    expect(logEntry.type).toBe(LogType.USER);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBe(blockHeight);
+  });
+
+  test('create a user warn log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.userWarn('User warn message', blockHeight);
+    expect(logEntry.message).toBe('User warn message');
+    expect(logEntry.level).toBe(LogLevel.WARN);
+    expect(logEntry.type).toBe(LogType.USER);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBe(blockHeight);
+  });
+
+  test('create a user error log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.userError('User error message', blockHeight);
+    expect(logEntry.message).toBe('User error message');
+    expect(logEntry.level).toBe(LogLevel.ERROR);
+    expect(logEntry.type).toBe(LogType.USER);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBe(blockHeight);
   });
 });
diff --git a/runner/src/log-entry/log-entry.ts b/runner/src/log-entry/log-entry.ts
index 01d9769b1..216d4c2cb 100644
--- a/runner/src/log-entry/log-entry.ts
+++ b/runner/src/log-entry/log-entry.ts
@@ -12,11 +12,39 @@ export default class LogEntry {
     this.timestamp = new Date();
   }

+  static createLog (message: string, level: LogLevel, type: LogType, blockHeight?: number): LogEntry {
+    return new LogEntry(message, level, type, blockHeight);
+  }
+
+  static systemDebug (message: string, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, LogLevel.DEBUG, LogType.SYSTEM, blockHeight);
+  }
+
   static systemInfo (message: string, blockHeight?: number): LogEntry {
-    return new LogEntry(message, LogLevel.INFO, LogType.SYSTEM, blockHeight);
+    return LogEntry.createLog(message, LogLevel.INFO, LogType.SYSTEM, blockHeight);
+  }
+
+  static systemWarn (message: string, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, LogLevel.WARN, LogType.SYSTEM, blockHeight);
+  }
+
+  static systemError (message: string, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, LogLevel.ERROR, LogType.SYSTEM, blockHeight);
+  }
+
+  static userLog (message: string, level: LogLevel, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, level, LogType.USER, blockHeight);
+  }
+
+  static userInfo (message: string, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, LogLevel.INFO, LogType.USER, blockHeight);
+  }
+
+  static userWarn (message: string, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, LogLevel.WARN, LogType.USER, blockHeight);
   }

-  static userLogs (message: string, level: LogLevel, blockHeight?: number): LogEntry {
-    return new LogEntry(message, level, LogType.USER, blockHeight);
+  static userError (message: string, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, LogLevel.ERROR, LogType.USER, blockHeight);
   }
 }

From 7254a50147fa91bbc16ed4d31a17edd67f914dc6 Mon Sep 17 00:00:00 2001
From: kevin
Date: Mon, 8 Apr 2024 13:23:07 -0400
Subject: [PATCH 08/39] fix: modified indexer-logger and test to use LogEntry

---
 .../src/indexer-logger/indexer-logger.test.ts | 75 +++++--------------
 runner/src/indexer-logger/indexer-logger.ts   | 20 ++---
 2 files changed, 25 insertions(+), 70 deletions(-)

diff --git a/runner/src/indexer-logger/indexer-logger.test.ts b/runner/src/indexer-logger/indexer-logger.test.ts
index 885a84ebe..d66aa06ca 100644
--- a/runner/src/indexer-logger/indexer-logger.test.ts
+++ b/runner/src/indexer-logger/indexer-logger.test.ts
@@ -1,7 +1,7 @@
 import pgFormat from 'pg-format';
-import IndexerLogger from './indexer-logger';
+import IndexerLogger, { LogLevel } from './indexer-logger';
 import type PgClient from '../pg-client';
-import { LogType, LogLevel, type LogEntry } from './indexer-logger';
+import LogEntry from '../log-entry/log-entry';

 describe('IndexerLogger', () => {
   let pgClient: PgClient;
@@ -27,15 +27,8 @@ describe('IndexerLogger', () => {
   describe('writeLog', () => {
     it('should insert a single log entry into the database', async () => {
       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
-      const logEntry: LogEntry = {
-        blockHeight: 123,
-        logTimestamp: new Date(),
-        logType: LogType.SYSTEM,
-        logLevel: LogLevel.INFO,
-        message: 'Test log message'
-      };
-
-      await indexerLogger.writeLogs(logEntry);
+      const infoEntry = LogEntry.systemInfo('Info message');
+      await indexerLogger.writeLogs(infoEntry);

       const expectedQueryStructure = `INSERT INTO "${functionName}".__logs (block_height, date, timestamp, type, level, message) VALUES`;
       expect(query.mock.calls[0][0]).toContain(expectedQueryStructure);
@@ -45,34 +38,18 @@ describe('IndexerLogger', () => {
       query.mockRejectedValueOnce(new Error('Failed to insert log'));

       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
-      const logEntry: LogEntry = {
-        blockHeight: 123,
-        logTimestamp: new Date(),
-        logType: LogType.SYSTEM,
-        logLevel: LogLevel.INFO,
-        message: 'Test log message'
-      };
-
-      await expect(indexerLogger.writeLogs(logEntry)).rejects.toThrow('Failed to insert log');
+      const infoEntry = LogEntry.systemInfo('Information message');
+
+      await expect(indexerLogger.writeLogs(infoEntry)).rejects.toThrow('Failed to insert log');
     });

     it('should insert a batch of log entries into the database', async () => {
       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
+      const debugEntry = LogEntry.systemDebug('Debug message');
+      const infoEntry = LogEntry.systemInfo('Information message');
       const logEntries: LogEntry[] = [
-        {
-          blockHeight: 123,
-          logTimestamp: new Date(),
-          logType: LogType.SYSTEM,
-          logLevel: LogLevel.INFO,
-          message: 'Test log message 1'
-        },
-        {
-          blockHeight: 124,
-          logTimestamp: new Date(),
-          logType: LogType.SYSTEM,
-          logLevel: LogLevel.INFO,
-          message: 'Test log message 2'
-        }
+        debugEntry,
+        infoEntry
       ];

       await indexerLogger.writeLogs(logEntries);
@@ -85,21 +62,11 @@ describe('IndexerLogger', () => {
       query.mockRejectedValueOnce(new Error('Failed to insert batch of logs'));

       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
+      const debugEntry = LogEntry.systemDebug('Debug message');
+      const infoEntry = LogEntry.systemInfo('Information message');
       const logEntries: LogEntry[] = [
-        {
-          blockHeight: 123,
-          logTimestamp: new Date(),
-          logType: LogType.SYSTEM,
-          logLevel: LogLevel.INFO,
-          message: 'Test log message 1'
-        },
-        {
-          blockHeight: 124,
-          logTimestamp: new Date(),
-          logType: LogType.SYSTEM,
-          logLevel: LogLevel.INFO,
-          message: 'Test log message 2'
-        }
+        debugEntry,
+        infoEntry
       ];

       await expect(indexerLogger.writeLogs(logEntries)).rejects.toThrow('Failed to insert batch of logs');
@@ -115,15 +82,9 @@ describe('IndexerLogger', () => {
     it('should skip log entries with levels lower than the logging level specified in the constructor', async () => {
       const indexerLogger = new IndexerLogger(functionName, LogLevel.ERROR, mockDatabaseConnectionParameters, pgClient);
-      const logEntry: LogEntry = {
-        blockHeight: 123,
-        logTimestamp: new Date(),
-        logType: LogType.SYSTEM,
-        logLevel: LogLevel.INFO,
-        message: 'Test log message'
-      };
-
-      await indexerLogger.writeLogs(logEntry);
+      const debugEntry = LogEntry.systemDebug('Debug message');
+
+      await indexerLogger.writeLogs(debugEntry);

       expect(query).not.toHaveBeenCalled();
     });
diff --git a/runner/src/indexer-logger/indexer-logger.ts b/runner/src/indexer-logger/indexer-logger.ts
index ee98ccb54..60adb99ab 100644
--- a/runner/src/indexer-logger/indexer-logger.ts
+++ b/runner/src/indexer-logger/indexer-logger.ts
@@ -3,14 +3,7 @@ import { wrapError } from '../utility';
 import PgClient from '../pg-client';
 import { type DatabaseConnectionParameters } from '../provisioner/provisioner';
 import { trace } from '@opentelemetry/api';
-
-export interface LogEntry {
-  blockHeight: number
-  logTimestamp: Date
-  logType: LogType
-  logLevel: LogLevel
-  message: string
-}
+import type LogEntry from '../log-entry/log-entry';

 export enum LogLevel {
   DEBUG = 2,
@@ -57,7 +50,8 @@ export default class IndexerLogger {
   async writeLogs (
     logEntries: LogEntry | LogEntry[],
   ): Promise<void> {
-    const entriesArray = (Array.isArray(logEntries) ? logEntries : [logEntries]).filter(entry => this.shouldLog(entry.logLevel)); ;
+    console.log(logEntries);
+    const entriesArray = (Array.isArray(logEntries) ? logEntries : [logEntries]).filter(entry => this.shouldLog(entry.level)); ;
     if (entriesArray.length === 0) return;

     const spanMessage = `write log for ${entriesArray.length === 1 ? 'single entry' : `batch of ${entriesArray.length}`} through postgres `;
@@ -66,10 +60,10 @@ export default class IndexerLogger {
     await wrapError(async () => {
       const values = entriesArray.map(entry => [
         entry.blockHeight,
-        entry.logTimestamp,
-        entry.logTimestamp,
-        entry.logType,
-        LogLevel[entry.logLevel],
+        entry.timestamp,
+        entry.timestamp,
+        entry.type,
+        LogLevel[entry.level],
         entry.message
       ]);

From 0be44dd97598f07a128724534fd46040d3594518 Mon Sep 17 00:00:00 2001
From: Kevin Zhang <42101107+Kevin101Zhang@users.noreply.github.com>
Date: Mon, 8 Apr 2024 14:47:15 -0400
Subject: [PATCH 09/39] fix: typo

---
 runner/src/log-entry/log-entry.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/runner/src/log-entry/log-entry.ts b/runner/src/log-entry/log-entry.ts
index 216d4c2cb..838f5ec6d 100644
--- a/runner/src/log-entry/log-entry.ts
+++ b/runner/src/log-entry/log-entry.ts
@@ -32,8 +32,8 @@ export default class LogEntry {
     return LogEntry.createLog(message, LogLevel.ERROR, LogType.SYSTEM, blockHeight);
   }

-  static userLog (message: string, level: LogLevel, blockHeight?: number): LogEntry {
-    return LogEntry.createLog(message, level, LogType.USER, blockHeight);
+  static userDebug (message: string, blockHeight?: number): LogEntry {
+    return LogEntry.createLog(message, LogLevel.DEBUG, LogType.USER, blockHeight);
   }

   static userInfo (message: string, blockHeight?: number): LogEntry {

From 8a331b2c22a17222da95e2cb0b8feb3c99d7dd83 Mon Sep 17 00:00:00 2001
From: Kevin Zhang <42101107+Kevin101Zhang@users.noreply.github.com>
Date: Mon, 8 Apr 2024 14:49:19 -0400
Subject: [PATCH 10/39] Update log-entry.test.ts

---
 runner/src/log-entry/log-entry.test.ts | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/log-entry/log-entry.test.ts
index 1a3c31f84..8f3698fd3 100644
--- a/runner/src/log-entry/log-entry.test.ts
+++ b/runner/src/log-entry/log-entry.test.ts
@@ -42,6 +42,16 @@ describe('LogEntry', () => {
     expect(logEntry.blockHeight).toBe(blockHeight);
   });

+  test('create a user debug log entry', () => {
+    const blockHeight = 100;
+    const logEntry = LogEntry.userDebug('Debug message', blockHeight);
+    expect(logEntry.message).toBe('Debug message');
+    expect(logEntry.level).toBe(LogLevel.DEBUG);
+    expect(logEntry.type).toBe(LogType.USER);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBe(blockHeight);
+  });
+
   test('create a user info log entry', () => {
     const blockHeight = 100;
     const logEntry = LogEntry.userInfo('User info message', blockHeight);

From 1809f7ab18e3f2ae1033bda48af36db24e6f5133 Mon Sep 17 00:00:00 2001
From: Kevin Zhang <42101107+Kevin101Zhang@users.noreply.github.com>
Date: Mon, 8 Apr 2024 14:50:54 -0400
Subject: [PATCH 11/39] chore: removed console.log

---
 runner/src/indexer-logger/indexer-logger.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/runner/src/indexer-logger/indexer-logger.ts b/runner/src/indexer-logger/indexer-logger.ts
index 60adb99ab..624132fb9 100644
--- a/runner/src/indexer-logger/indexer-logger.ts
+++ b/runner/src/indexer-logger/indexer-logger.ts
@@ -50,7 +50,7 @@ export default class IndexerLogger {
   async writeLogs (
     logEntries: LogEntry | LogEntry[],
   ): Promise<void> {
-    console.log(logEntries);
+
     const entriesArray = (Array.isArray(logEntries) ? logEntries : [logEntries]).filter(entry => this.shouldLog(entry.level)); ;
     if (entriesArray.length === 0) return;

From 97e6ebcb7fe69d1b955b0aadeaae3ed52bb90b31 Mon Sep 17 00:00:00 2001
From: kevin
Date: Mon, 8 Apr 2024 17:18:28 -0400
Subject: [PATCH 12/39] fix: LogLevel and LogType are defined in log-entry

---
 runner/src/indexer-logger/indexer-logger.test.ts |  2 +-
 runner/src/indexer-logger/indexer-logger.ts      | 14 +-------------
 runner/src/indexer/indexer.test.ts               |  4 ++--
 runner/src/indexer/indexer.ts                    |  8 ++++----
 runner/src/log-entry/log-entry.test.ts           |  3 +--
 runner/src/log-entry/log-entry.ts                | 11 ++++++++++-
 runner/src/server/runner-service.test.ts         |  2 +-
 runner/src/server/runner-service.ts              |  2 +-
 runner/src/stream-handler/stream-handler.ts      | 10 +++++-----
 runner/tests/integration.test.ts                 |  2 +-
 10 files changed, 28 insertions(+), 32 deletions(-)

diff --git a/runner/src/indexer-logger/indexer-logger.test.ts b/runner/src/indexer-logger/indexer-logger.test.ts
index d66aa06ca..cc8411380 100644
--- a/runner/src/indexer-logger/indexer-logger.test.ts
+++ b/runner/src/indexer-logger/indexer-logger.test.ts
@@ -1,7 +1,7 @@
 import pgFormat from 'pg-format';
-import IndexerLogger, { LogLevel } from './indexer-logger';
+import IndexerLogger from './indexer-logger';
 import type PgClient from '../pg-client';
-import LogEntry from '../log-entry/log-entry';
+import LogEntry, { LogLevel } from '../log-entry/log-entry';

 describe('IndexerLogger', () => {
   let pgClient: PgClient;
diff --git a/runner/src/indexer-logger/indexer-logger.ts b/runner/src/indexer-logger/indexer-logger.ts
index 624132fb9..ca2d5bd3e 100644
--- a/runner/src/indexer-logger/indexer-logger.ts
+++ b/runner/src/indexer-logger/indexer-logger.ts
@@ -3,19 +3,8 @@ import { wrapError } from '../utility';
 import PgClient from '../pg-client';
 import { type DatabaseConnectionParameters } from '../provisioner/provisioner';
 import { trace } from '@opentelemetry/api';
-import type LogEntry from '../log-entry/log-entry';
+import LogEntry, { LogLevel } from '../log-entry/log-entry';

-export enum LogLevel {
-  DEBUG = 2,
-  INFO = 5,
-  WARN = 6,
-  ERROR = 8,
-}
-
-export enum LogType {
-  SYSTEM = 'system',
-  USER = 'user',
-}

 export default class IndexerLogger {
   tracer = trace.getTracer('queryapi-runner-indexer-logger');
diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts
index 789fed970..399908ccd 100644
--- a/runner/src/indexer/indexer.test.ts
+++ b/runner/src/indexer/indexer.test.ts
@@ -6,7 +6,7 @@ import { VM } from 'vm2';
 import DmlHandler from '../dml-handler/dml-handler';
 import type PgClient from '../pg-client';
 import { type IndexerBehavior } from '../stream-handler/stream-handler';
-import { LogLevel } from '../indexer-logger/indexer-logger';
+import { LogLevel } from '../log-entry/log-entry';

 describe('Indexer unit tests', () => {
   const HASURA_ROLE = 'morgs_near';
@@ -1263,4 +1263,4 @@ CREATE TABLE
     }
   ]);
   });
-});
\ No newline at end of file
+});
diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts
index 3dd442003..25beb9b19 100644
--- a/runner/src/indexer/indexer.ts
+++ b/runner/src/indexer/indexer.ts
@@ -8,7 +8,7 @@ import DmlHandler from '../dml-handler/dml-handler';
 // import IndexerLogger from '../indexer-logger/indexer-logger';

 import { type IndexerBehavior, Status } from '../stream-handler/stream-handler';
-import { /*type LogEntry, LogType,*/ LogLevel } from '../indexer-logger/indexer-logger';
+import /** LogEntry, LogType, */{ LogLevel } from '../log-entry/log-entry';
 import { type DatabaseConnectionParameters } from '../provisioner/provisioner';
 import { trace, type Span } from '@opentelemetry/api';

@@ -147,7 +147,7 @@ export default class Indexer {
       const resourceCreationSpan = this.tracer.startSpan('prepare vm and context to run indexer code');
       simultaneousPromises.push(this.setStatus(functionName, blockHeight, 'RUNNING'));
       const vm = new VM({ allowAsync: true });
-      const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName, /*logEntries*/);
+      const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName /* logEntries */);

       vm.freeze(block, 'block');
       vm.freeze(lakePrimitives, 'primitives');
@@ -195,7 +195,7 @@ export default class Indexer {
     ].reduce((acc, val) => val(acc), indexerFunction);
   }

-  buildContext (schema: string, functionName: string, blockHeight: number, hasuraRoleName: string/*, logEntries: LogEntry[]*/): Context {
+  buildContext (schema: string, functionName: string, blockHeight: number, hasuraRoleName: string/*, logEntries: LogEntry[] */): Context {
     const functionNameWithoutAccount = functionName.split('/')[1].replace(/[.-]/g, '_');
     const schemaName = functionName.replace(/[^a-zA-Z0-9]/g, '_');
     return {
@@ -243,7 +243,7 @@ export default class Indexer {
       fetchFromSocialApi: async (path, options) => {
         return await this.deps.fetch(`https://api.near.social${path}`, options);
       },
-      db: this.buildDatabaseContext(functionName, schemaName, schema, blockHeight/*, logEntries*/)
+      db: this.buildDatabaseContext(functionName, schemaName, schema, blockHeight/*, logEntries */)
     };
   }
diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/log-entry/log-entry.test.ts
index 8f3698fd3..d20a62a24 100644
--- a/runner/src/log-entry/log-entry.test.ts
+++ b/runner/src/log-entry/log-entry.test.ts
@@ -1,5 +1,4 @@
-import LogEntry from './log-entry';
-import { LogType, LogLevel } from '../indexer-logger/indexer-logger';
+import LogEntry, { LogType, LogLevel } from './log-entry';

 describe('LogEntry', () => {
   test('create a system debug log entry', () => {
diff --git a/runner/src/log-entry/log-entry.ts b/runner/src/log-entry/log-entry.ts
index 838f5ec6d..e657da0bd 100644
--- a/runner/src/log-entry/log-entry.ts
+++ b/runner/src/log-entry/log-entry.ts
@@ -1,5 +1,14 @@
-import { LogType, LogLevel } from '../indexer-logger/indexer-logger';
+export enum LogLevel {
+  DEBUG = 2,
+  INFO = 5,
+  WARN = 6,
+  ERROR = 8,
+}

+export enum LogType {
+  SYSTEM = 'system',
+  USER = 'user',
+}
 export default class LogEntry {
   public readonly timestamp: Date;

diff --git a/runner/src/server/runner-service.test.ts b/runner/src/server/runner-service.test.ts
index a9f95b42a..d2ec96feb 100644
--- a/runner/src/server/runner-service.test.ts
+++ b/runner/src/server/runner-service.test.ts
@@ -1,6 +1,6 @@
 import type StreamHandler from '../stream-handler/stream-handler';
 import { Status } from '../stream-handler/stream-handler';
-import { LogLevel } from '../indexer-logger/indexer-logger';
+import { LogLevel } from '../log-entry/log-entry';
 import getRunnerService from './runner-service';
 import * as grpc from '@grpc/grpc-js';
diff --git a/runner/src/server/runner-service.ts b/runner/src/server/runner-service.ts
index 34be24f9d..e06d65413 100644
--- a/runner/src/server/runner-service.ts
+++ b/runner/src/server/runner-service.ts
@@ -1,7 +1,7 @@
 import { type ServerUnaryCall, type sendUnaryData } from '@grpc/grpc-js';
 import * as grpc from '@grpc/grpc-js';
 import { Status } from '../stream-handler/stream-handler';
-import { LogLevel } from '../indexer-logger/indexer-logger';
+import { LogLevel } from '../log-entry/log-entry';
 import crypto from 'crypto';

 import { type RunnerHandlers } from '../generated/runner/Runner';
diff --git a/runner/src/stream-handler/stream-handler.ts b/runner/src/stream-handler/stream-handler.ts
index 5befe5ff7..fd6acc484 100644
--- a/runner/src/stream-handler/stream-handler.ts
+++ b/runner/src/stream-handler/stream-handler.ts
@@ -3,7 +3,7 @@ import { Worker, isMainThread } from 'worker_threads';

 import { registerWorkerMetrics, deregisterWorkerMetrics } from '../metrics';
 import Indexer from '../indexer';
-import { /*LogType,*/ LogLevel } from '../indexer-logger/indexer-logger';
+import { /* LogType, */ LogLevel } from '../log-entry/log-entry';

 export enum Status {
   RUNNING = 'RUNNING',
@@ -94,10 +94,10 @@ export default class StreamHandler {
           //   message: `Encountered error processing stream: ${this.streamKey}, terminating thread\n${error.toString()}`
           // })
         ])
-        .catch((e) => {
-          console.error(`Failed to write log for stream: ${this.streamKey}`, e);
-        });
-
+          .catch((e) => {
+            console.error(`Failed to write log for stream: ${this.streamKey}`, e);
+          });
+
         this.worker.terminate().catch(() => {
           console.error(`Failed to terminate thread for stream: ${this.streamKey}`);
         });
diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts
index 105fe889d..1c7490e01 100644
--- a/runner/tests/integration.test.ts
+++ b/runner/tests/integration.test.ts
@@ -6,7 +6,7 @@ import Indexer from '../src/indexer';
 import HasuraClient from '../src/hasura-client';
 import Provisioner from '../src/provisioner';
 import PgClient from '../src/pg-client';
-import { LogLevel } from '../src/indexer-logger/indexer-logger';
+import { LogLevel } from '../src/log-entry/log-entry';

 import { HasuraGraphQLContainer, type StartedHasuraGraphQLContainer } from './testcontainers/hasura';
 import { PostgreSqlContainer, type StartedPostgreSqlContainer } from './testcontainers/postgres';

From 749e9f25c280927b6a7a97a292f983eb5993530d Mon Sep 17 00:00:00 2001
From: kevin
Date: Mon, 8 Apr 2024 17:38:03 -0400
Subject: [PATCH 13/39] fix: indexer-logger now matches exact query

---
 .../src/indexer-logger/indexer-logger.test.ts | 19 +++++++++++++++++--
 runner/src/indexer-logger/indexer-logger.ts   |  3 ++-
 2 files changed, 19 insertions(+), 3 deletions(-)

diff --git a/runner/src/indexer-logger/indexer-logger.test.ts b/runner/src/indexer-logger/indexer-logger.test.ts
index cc8411380..2a276adf6 100644
--- a/runner/src/indexer-logger/indexer-logger.test.ts
+++ b/runner/src/indexer-logger/indexer-logger.test.ts
@@ -30,8 +30,23 @@ describe('IndexerLogger', () => {
       const infoEntry = LogEntry.systemInfo('Info message');
       await indexerLogger.writeLogs(infoEntry);

-      const expectedQueryStructure = `INSERT INTO "${functionName}".__logs (block_height, date, timestamp, type, level, message) VALUES`;
-      expect(query.mock.calls[0][0]).toContain(expectedQueryStructure);
+      const timestampPattern = '\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}';
+      const expectedQueryStructure = `INSERT INTO "${functionName}".__logs \\(block_height, date, timestamp, type, level, message\\) VALUES \\(NULL, '${timestampPattern}', '${timestampPattern}', 'system', 'INFO', 'Info message'\\)`;
+
+      const queryRegex = new RegExp(expectedQueryStructure);
+      expect(query.mock.calls[0][0]).toMatch(queryRegex);
+    });
+
+    it('should insert a single log entry into the database when logEntry has a blockheight', async () => {
+      const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
+      const errorEntry = LogEntry.systemError('Error message', 12345);
+      await indexerLogger.writeLogs(errorEntry);
+
+      const timestampPattern = '\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}';
+      const expectedQueryStructure = `INSERT INTO "${functionName}".__logs \\(block_height, date, timestamp, type, level, message\\) VALUES \\('12345', '${timestampPattern}', '${timestampPattern}', 'system', 'ERROR', 'Error message'\\)`;
+
+      const queryRegex = new RegExp(expectedQueryStructure);
+      expect(query.mock.calls[0][0]).toMatch(queryRegex);
     });

     it('should handle errors when inserting a single log entry', async () => {
diff --git a/runner/src/indexer-logger/indexer-logger.ts b/runner/src/indexer-logger/indexer-logger.ts
index ca2d5bd3e..1ade2b310 100644
--- a/runner/src/indexer-logger/indexer-logger.ts
+++ b/runner/src/indexer-logger/indexer-logger.ts
@@ -3,7 +3,8 @@ import { wrapError } from '../utility';
 import PgClient from '../pg-client';
 import { type DatabaseConnectionParameters } from '../provisioner/provisioner';
 import { trace } from '@opentelemetry/api';
-import LogEntry, { LogLevel } from '../log-entry/log-entry';
+import type LogEntry from '../log-entry/log-entry';
+import { LogLevel } from '../log-entry/log-entry';

 export default class IndexerLogger {
   tracer = trace.getTracer('queryapi-runner-indexer-logger');

From a58292173d07c0c632f2c2559d9eee97f508abd6 Mon Sep 17 00:00:00 2001
From: kevin
Date: Mon, 8 Apr 2024 18:05:58 -0400
Subject: [PATCH 14/39] fix: added test without block height

---
 runner/src/log-entry/log-entry.test.ts | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/log-entry/log-entry.test.ts
index d20a62a24..d3381aefd 100644
--- a/runner/src/log-entry/log-entry.test.ts
+++ b/runner/src/log-entry/log-entry.test.ts
@@ -80,4 +80,14 @@ describe('LogEntry', () => {
     expect(logEntry.timestamp).toBeInstanceOf(Date);
     expect(logEntry.blockHeight).toBe(blockHeight);
   });
+
+  test('create a system info log entry without blockheight', () => {
+    const logEntry = LogEntry.systemInfo('Info message');
+    expect(logEntry.message).toBe('Info message');
+    expect(logEntry.level).toBe(LogLevel.INFO);
+    expect(logEntry.type).toBe(LogType.SYSTEM);
+    expect(logEntry.timestamp).toBeInstanceOf(Date);
+    expect(logEntry.blockHeight).toBeUndefined();
+  });
 });
+

From 03010611a5bd890f7ae52998f7ca65aabd38a4d0 Mon Sep 17 00:00:00 2001
From: kevin
Date: Mon, 8 Apr 2024 18:42:56 -0400
Subject: [PATCH 15/39] chore: merge scripts

---
 runner/src/provisioner/provisioner.ts |  1 -
 runner/src/test-client.ts             | 89 +++++++++++++++++++++++++++
 scripts/wipe-database.sh              |  9 ++-
 3 files changed, 95 insertions(+), 4 deletions(-)
 create mode 100644 runner/src/test-client.ts

diff --git a/runner/src/provisioner/provisioner.ts b/runner/src/provisioner/provisioner.ts
index c1626af50..88806044b 100644
--- a/runner/src/provisioner/provisioner.ts
+++ b/runner/src/provisioner/provisioner.ts
@@ -114,7 +114,6 @@ export default class Provisioner {
       host: this.config.hasuraHostOverride ?? userDbConnectionParameters.host,
       port: this.config.hasuraPortOverride ?? userDbConnectionParameters.port,
     });
-    console.log(userCronPgClient)
     await userCronPgClient.query(
       this.pgFormat(
         "SELECT cron.schedule_in_database('%1$I_logs_create_partition', '0 1 * * *', $$SELECT fn_create_partition('%1$I.__logs', CURRENT_DATE, '1 day', '2 day')$$, %2$L);",
diff --git a/runner/src/test-client.ts b/runner/src/test-client.ts
new file mode 100644
index 000000000..5628e26c3
--- /dev/null
+++ b/runner/src/test-client.ts
@@ -0,0 +1,89 @@
+// Run with 'npx ts-node src/test-client.ts' // located at
+import runnerClient from './server/runner-client';
+const schema = `
+CREATE TABLE
+versions (
+  "id" SERIAL PRIMARY KEY,
+  "block_height" BIGINT NOT NULL,
+  "block_timestamp_ms" BIGINT NOT NULL,
+  "code" VARCHAR NOT NULL,
+  "component_author_id" VARCHAR NOT NULL,
+  "component_name" VARCHAR NOT NULL,
+  "lines_added" INT NOT NULL,
+  "lines_removed" INT NOT NULL,
+  "receipt_id" VARCHAR NOT NULL
+);
+CREATE TABLE
+metadata (
+  "component_id" VARCHAR PRIMARY KEY,
+  "block_height" BIGINT NOT NULL,
+  "block_timestamp_ms" BIGINT NOT NULL,
+  "code" VARCHAR NOT NULL,
+  "component_author_id" VARCHAR NOT NULL,
+  "component_name" VARCHAR NOT NULL,
+  "star_count" INT NOT NULL,
+  "fork_count" INT NOT NULL,
+  "name" VARCHAR,
+  "image_ipfs_cid" VARCHAR,
+  "description" VARCHAR,
+  "fork_of_source" VARCHAR,
+  "fork_of_block_height" BIGINT,
+  "tags" VARCHAR,
+  "website" VARCHAR
+);
+`;
+const code = `
+const h = block.header().height;
+const blockTimestampMs = Math.floor(
+  Number(block.header().timestampNanosec) / 1e6
+);
+const code = 'console.log("hello world")';
+const componentAuthorId = 'kevin0.near';
+const componentName = 'test_component_1';
+const linesAdded = 1;
+const linesRemoved = 1;
+receiptId = '3WGZ91JVF2kxF54SryuktCCmH2kgijuGM9P3uoqSGs5s'
+await console.debug('debug log');
+await console.log('info log');
+await console.log('Hello this is some indexer log');
+await console.error('error log');
+// await context.db.Metadata.insert(
+//   {block_height: h, block_timestamp_ms: blockTimestampMs, code, component_author_id: componentAuthorId, component_name: componentName, star_count: 0, fork_count: 0, name: 'test', image_ipfs_cid: 'test', description: 'test', fork_of_source: 'test', fork_of_block_height: 0, tags: 'test', website: 'test'}
+// );
+`;
+const indexer = {
+  account_id: 'kevin3.near',
+  redis_stream: 'test:block_stream',
+  function_name: 'component_01',
+  code,
+  start_block_height: 113448278,
+  schema,
+  provisioned: true,
+  indexer_rule: {
+    indexer_rule_kind: 'Action',
+    matching_rule: {
+      rule: 'ACTION_ANY',
+      affected_account_id: 'social.near',
+      status: 'SUCCESS'
+    },
+    id: null,
+    name: null
+  }
+};
+void (async function main () {
+  // console.log(indexer.redis_stream, indexer.account_id, indexer.function_name, indexer.code, indexer.schema)
+  runnerClient.StartExecutor({
+    redisStream: indexer.redis_stream,
+    accountId: indexer.account_id,
+    functionName: indexer.function_name,
+    code: indexer.code,
+    schema: indexer.schema
+  }, (err, response) => {
+    if (err) {
+    } else {
+      console.log('start: ', response);
+      console.log('running...')
+    }
+  });
+  console.log('done')
+})();
\ No newline at end of file
diff --git a/scripts/wipe-database.sh b/scripts/wipe-database.sh
index 3dcf00bb2..9b7ee3970 100755
--- a/scripts/wipe-database.sh
+++ b/scripts/wipe-database.sh
@@ -5,8 +5,8 @@ PG_SUPERUSER="postgres"
 PG_SUPERUSER_PASSWORD="postgrespassword"

 # Exclude these databases and users
-EXCLUDED_DATABASES="'postgres', 'template0', 'template1'"
-EXCLUDED_USERS="'postgres'"
+EXCLUDED_DATABASES="'postgres', 'template0', 'template1', 'cron'"
+EXCLUDED_USERS="'postgres', 'pgbouncer'"

 # Get a list of databases, excluding the defaults
 DATABASES=$(psql -U $PG_SUPERUSER -t -c "SELECT datname FROM pg_database WHERE datname NOT IN ($EXCLUDED_DATABASES);")
@@ -22,9 +22,12 @@ done

 # Drop each user
 for user in $USERS; do
+    echo "Revoking privileges for user: $user"
+    psql -U $PG_SUPERUSER -c "REVOKE ALL PRIVILEGES ON FUNCTION cron.schedule_in_database(text,text,text,text,text,boolean) FROM $user;"
+    psql -U $PG_SUPERUSER -c "REVOKE ALL PRIVILEGES ON SCHEMA cron FROM $user;"
     echo "Dropping user: $user"
     psql -U $PG_SUPERUSER -c "DROP USER IF EXISTS $user;"
-
+
 done

 echo "All non-default databases and users have been dropped."

From 4e431e90c1757974cae162a3462b61536caef6be Mon Sep 17 00:00:00 2001
From: kevin
Date: Tue, 9 Apr 2024 03:40:13 -0400
Subject: [PATCH 16/39] fix: used fake timers on indexer-logger test

---
 .../src/indexer-logger/indexer-logger.test.ts | 22 ++++++++++---------
 1 file changed, 12 insertions(+), 10 deletions(-)

diff --git a/runner/src/indexer-logger/indexer-logger.test.ts b/runner/src/indexer-logger/indexer-logger.test.ts
index 2a276adf6..195f07898 100644
--- a/runner/src/indexer-logger/indexer-logger.test.ts
+++ b/runner/src/indexer-logger/indexer-logger.test.ts
@@ -26,27 +26,29 @@ describe('IndexerLogger', () => {

   describe('writeLog', () => {
     it('should insert a single log entry into the database', async () => {
+      const date = new Date();
+      jest.useFakeTimers({ now: date.getTime() });
+      const formattedDate = date.toISOString().replace('T', ' ').replace('Z', '+00');
+
       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
       const infoEntry = LogEntry.systemInfo('Info message');
       await indexerLogger.writeLogs(infoEntry);

-      const timestampPattern = '\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}';
-      const expectedQueryStructure = `INSERT INTO "${functionName}".__logs \\(block_height, date, timestamp, type, level, message\\) VALUES \\(NULL, '${timestampPattern}', '${timestampPattern}', 'system', 'INFO', 'Info message'\\)`;
-
-      const queryRegex = new RegExp(expectedQueryStructure);
-      expect(query.mock.calls[0][0]).toMatch(queryRegex);
+      const expectedQueryStructure = `INSERT INTO "${functionName}".__logs (block_height, date, timestamp, type, level, message) VALUES (NULL, '${formattedDate}', '${formattedDate}', 'system', 'INFO', 'Info message')`;
+      expect(query.mock.calls[0][0]).toEqual(expectedQueryStructure);
     });

     it('should insert a single log entry into the database when logEntry has a blockheight', async () => {
+      const date = new Date();
+      jest.useFakeTimers({ now: date.getTime() });
+      const formattedDate = date.toISOString().replace('T', ' ').replace('Z', '+00');
+
       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
       const errorEntry = LogEntry.systemError('Error message', 12345);
       await indexerLogger.writeLogs(errorEntry);

-      const timestampPattern = '\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}\\.\\d+\\+\\d{2}';
-      const expectedQueryStructure = `INSERT INTO "${functionName}".__logs \\(block_height, date, timestamp, type, level, message\\) VALUES \\('12345', '${timestampPattern}', '${timestampPattern}', 'system', 'ERROR', 'Error message'\\)`;
-
-      const queryRegex = new RegExp(expectedQueryStructure);
-      expect(query.mock.calls[0][0]).toMatch(queryRegex);
+      const expectedQueryStructure = `INSERT INTO "${functionName}".__logs (block_height, date, timestamp, type, level, message) VALUES ('12345', '${formattedDate}', '${formattedDate}', 'system', 'ERROR', 'Error message')`;
+      expect(query.mock.calls[0][0]).toEqual(expectedQueryStructure);
     });

From 954528e4884c9f023ba3da1cfbd1c20bfdba94f4 Mon Sep 17 00:00:00 2001
From: kevin
Date: Tue, 9 Apr 2024 03:45:57 -0400
Subject: [PATCH 17/39] fix: writeLogs exclusively accepts an array of LogEntry

---
 runner/src/indexer-logger/indexer-logger.test.ts | 8 ++++----
 runner/src/indexer-logger/indexer-logger.ts      | 4 ++--
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/runner/src/indexer-logger/indexer-logger.test.ts b/runner/src/indexer-logger/indexer-logger.test.ts
index 195f07898..d14159285 100644
--- a/runner/src/indexer-logger/indexer-logger.test.ts
+++ b/runner/src/indexer-logger/indexer-logger.test.ts
@@ -32,7 +32,7 @@ describe('IndexerLogger', () => {

       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
       const infoEntry = LogEntry.systemInfo('Info message');
-      await indexerLogger.writeLogs(infoEntry);
+      await indexerLogger.writeLogs([infoEntry]);

       const expectedQueryStructure = `INSERT INTO "${functionName}".__logs (block_height, date, timestamp, type, level, message) VALUES (NULL, '${formattedDate}', '${formattedDate}', 'system', 'INFO', 'Info message')`;
       expect(query.mock.calls[0][0]).toEqual(expectedQueryStructure);
@@ -45,7 +45,7 @@ describe('IndexerLogger', () => {

       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
       const errorEntry = LogEntry.systemError('Error message', 12345);
-      await indexerLogger.writeLogs(errorEntry);
+      await indexerLogger.writeLogs([errorEntry]);

       const expectedQueryStructure = `INSERT INTO "${functionName}".__logs (block_height, date, timestamp, type, level, message) VALUES ('12345', '${formattedDate}', '${formattedDate}', 'system', 'ERROR', 'Error message')`;
       expect(query.mock.calls[0][0]).toEqual(expectedQueryStructure);
@@ -57,7 +57,7 @@ describe('IndexerLogger', () => {
       const indexerLogger = new IndexerLogger(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, pgClient);
       const infoEntry = LogEntry.systemInfo('Information message');

-      await expect(indexerLogger.writeLogs(infoEntry)).rejects.toThrow('Failed to insert log');
+      await expect(indexerLogger.writeLogs([infoEntry])).rejects.toThrow('Failed to insert log');
     });

     it('should insert a batch of log entries into the database', async () => {
@@ -101,7 +101,7 @@ describe('IndexerLogger', () => {
       const indexerLogger = new IndexerLogger(functionName, LogLevel.ERROR, mockDatabaseConnectionParameters, pgClient);
       const debugEntry = LogEntry.systemDebug('Debug message');

-      await indexerLogger.writeLogs(debugEntry);
+      await indexerLogger.writeLogs([debugEntry]);

       expect(query).not.toHaveBeenCalled();
     });
diff --git a/runner/src/indexer-logger/indexer-logger.ts b/runner/src/indexer-logger/indexer-logger.ts
index 1ade2b310..42bc2a038 100644
--- a/runner/src/indexer-logger/indexer-logger.ts
+++ b/runner/src/indexer-logger/indexer-logger.ts
@@ -38,9 +38,9 @@ export default class IndexerLogger {
   }

   async writeLogs (
-    logEntries: LogEntry | LogEntry[],
+    logEntries: LogEntry[],
   ): Promise<void> {
-    const entriesArray = (Array.isArray(logEntries) ? logEntries : [logEntries]).filter(entry => this.shouldLog(entry.level)); ;
+    const entriesArray = logEntries.filter(entry => this.shouldLog(entry.level));
     if (entriesArray.length === 0) return;

     const spanMessage = `write log for ${entriesArray.length === 1 ? 'single entry' : `batch of ${entriesArray.length}`} through postgres `;

From 954528e4884c9f023ba3da1cfbd1c20bfdba94f4 Mon Sep 17 00:00:00 2001
From: kevin
Date: Tue, 9 Apr 2024 15:12:04 -0400
Subject: [PATCH 18/39] chore: moved log-entry files to indexer-logger

---
 runner/src/indexer-logger/indexer-logger.test.ts       | 2 +-
 runner/src/indexer-logger/indexer-logger.ts            | 4 ++--
 runner/src/{log-entry => indexer-logger}/log-entry.test.ts | 0
 runner/src/{log-entry => indexer-logger}/log-entry.ts  | 1 +
 runner/src/indexer/indexer.test.ts                     | 2 +-
 runner/src/indexer/indexer.ts                          | 2 +-
 runner/src/server/runner-service.test.ts               | 2 +-
 runner/src/server/runner-service.ts                    | 2 +-
 runner/src/stream-handler/stream-handler.ts            | 2 +-
 runner/tests/integration.test.ts                       | 2 +-
 10 files changed, 10 insertions(+), 9 deletions(-)
 rename runner/src/{log-entry => indexer-logger}/log-entry.test.ts (100%)
 rename runner/src/{log-entry => indexer-logger}/log-entry.ts (99%)

diff --git a/runner/src/indexer-logger/indexer-logger.test.ts b/runner/src/indexer-logger/indexer-logger.test.ts
index d14159285..f985782fc 100644
--- a/runner/src/indexer-logger/indexer-logger.test.ts
+++ b/runner/src/indexer-logger/indexer-logger.test.ts
@@ -1,7 +1,7 @@
 import pgFormat from 'pg-format';
 import IndexerLogger from './indexer-logger';
 import type PgClient from '../pg-client';
-import LogEntry, { LogLevel } from '../log-entry/log-entry';
+import LogEntry, { LogLevel } from './log-entry';

 describe('IndexerLogger', () => {
   let pgClient: PgClient;
diff --git a/runner/src/indexer-logger/indexer-logger.ts b/runner/src/indexer-logger/indexer-logger.ts
index 42bc2a038..a47572b84 100644
--- a/runner/src/indexer-logger/indexer-logger.ts
+++ b/runner/src/indexer-logger/indexer-logger.ts
@@ -3,8 +3,8 @@ import { wrapError } from '../utility';
 import PgClient from '../pg-client';
 import { type DatabaseConnectionParameters } from '../provisioner/provisioner';
 import { trace } from '@opentelemetry/api';
-import type LogEntry from '../log-entry/log-entry';
-import { LogLevel } from '../log-entry/log-entry';
+import type LogEntry from './log-entry';
+import { LogLevel } from './log-entry';

 export default class IndexerLogger {
   tracer = trace.getTracer('queryapi-runner-indexer-logger');
diff --git a/runner/src/log-entry/log-entry.test.ts b/runner/src/indexer-logger/log-entry.test.ts
similarity index 100%
rename from runner/src/log-entry/log-entry.test.ts
rename to runner/src/indexer-logger/log-entry.test.ts
diff --git a/runner/src/log-entry/log-entry.ts b/runner/src/indexer-logger/log-entry.ts
similarity index 99%
rename from runner/src/log-entry/log-entry.ts
rename to runner/src/indexer-logger/log-entry.ts
index e657da0bd..4cb7dba21 100644
--- a/runner/src/log-entry/log-entry.ts
+++ b/runner/src/indexer-logger/log-entry.ts
@@ -9,6 +9,7 @@ export enum LogType {
   SYSTEM = 'system',
   USER = 'user',
 }
+
 export default class LogEntry {
   public readonly timestamp: Date;

diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts
index 399908ccd..61bfb0bf4 100644
--- a/runner/src/indexer/indexer.test.ts
+++ b/runner/src/indexer/indexer.test.ts
@@ -6,7 +6,7 @@ import { VM } from 'vm2';
 import DmlHandler from '../dml-handler/dml-handler';
 import type PgClient from '../pg-client';
 import { type IndexerBehavior } from '../stream-handler/stream-handler';
-import { LogLevel } from '../log-entry/log-entry';
+import { LogLevel } from '../indexer-logger/log-entry';

 describe('Indexer unit tests', () => {
   const HASURA_ROLE = 'morgs_near';
diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts
index 25beb9b19..4299c03e7 100644
--- a/runner/src/indexer/indexer.ts
+++ b/runner/src/indexer/indexer.ts
@@ -8,7 +8,7 @@ import DmlHandler from '../dml-handler/dml-handler';
 // import IndexerLogger from '../indexer-logger/indexer-logger';

 import { type IndexerBehavior, Status } from '../stream-handler/stream-handler';
-import /** LogEntry, LogType, */{ LogLevel } from '../log-entry/log-entry';
+import /** LogEntry, LogType, */{ LogLevel } from '../indexer-logger/log-entry';
 import { type DatabaseConnectionParameters } from '../provisioner/provisioner';
 import { trace, type Span } from '@opentelemetry/api';

diff --git a/runner/src/server/runner-service.test.ts b/runner/src/server/runner-service.test.ts
index d2ec96feb..f3055f47a 100644
--- a/runner/src/server/runner-service.test.ts
+++ b/runner/src/server/runner-service.test.ts
@@ -1,6 +1,6 @@
 import type StreamHandler from '../stream-handler/stream-handler';
 import { Status } from '../stream-handler/stream-handler';
-import { LogLevel } from '../log-entry/log-entry';
+import { LogLevel } from '../indexer-logger/log-entry';
 import getRunnerService from './runner-service';
 import * as grpc from '@grpc/grpc-js';
diff --git a/runner/src/server/runner-service.ts b/runner/src/server/runner-service.ts
index e06d65413..b1fe6bc10 100644
--- a/runner/src/server/runner-service.ts
+++ b/runner/src/server/runner-service.ts
@@ -1,7 +1,7 @@
 import { type ServerUnaryCall, type sendUnaryData } from '@grpc/grpc-js';
 import * as grpc from '@grpc/grpc-js';
 import { Status } from '../stream-handler/stream-handler';
-import { LogLevel } from '../log-entry/log-entry';
+import { LogLevel } from '../indexer-logger/log-entry';
 import crypto from 'crypto';

 import { type RunnerHandlers } from '../generated/runner/Runner';
diff --git a/runner/src/stream-handler/stream-handler.ts b/runner/src/stream-handler/stream-handler.ts
index fd6acc484..cd38ae81a 100644
--- a/runner/src/stream-handler/stream-handler.ts
+++ b/runner/src/stream-handler/stream-handler.ts
@@ -3,7 +3,7 @@ import { Worker, isMainThread } from 'worker_threads';

 import { registerWorkerMetrics, deregisterWorkerMetrics } from '../metrics';
 import Indexer from '../indexer';
-import { /* LogType, */ LogLevel } from '../log-entry/log-entry';
+import { /* LogType, */ LogLevel } from '../indexer-logger/log-entry';

 export enum Status {
   RUNNING = 'RUNNING',
diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts
index 1c7490e01..52f176602 100644
--- a/runner/tests/integration.test.ts
+++ b/runner/tests/integration.test.ts
@@ -6,7 +6,7 @@ import Indexer from '../src/indexer';
 import HasuraClient from '../src/hasura-client';
 import Provisioner from '../src/provisioner';
 import PgClient from '../src/pg-client';
-import { LogLevel } from '../src/log-entry/log-entry';
+import { LogLevel } from '../src/indexer-logger/log-entry';

 import { HasuraGraphQLContainer, type StartedHasuraGraphQLContainer } from './testcontainers/hasura';
 import { PostgreSqlContainer, type StartedPostgreSqlContainer } from './testcontainers/postgres';

From 030fa4ce87d9f75ec2f76d23779b2b0846a561e7 Mon Sep 17 00:00:00 2001
From: kevin
Date: Wed, 10 Apr 2024 14:08:23 -0400
Subject: [PATCH 19/39] feat: provisioning of new users

---
 runner/src/indexer-meta/indexer-meta.ts |  2 +-
 runner/src/indexer/indexer.test.ts      | 59 +++++++++-------
 runner/src/indexer/indexer.ts           | 82 +++++++++++++++--------
 runner/src/test-client.ts               | 89 -------------------------
 runner/tests/integration.test.ts        | 13 ++++
 5 files changed, 104 insertions(+), 141 deletions(-)
 delete mode 100644 runner/src/test-client.ts

diff --git a/runner/src/indexer-meta/indexer-meta.ts b/runner/src/indexer-meta/indexer-meta.ts
index b2d3e2652..1da1343ac 100644
--- a/runner/src/indexer-meta/indexer-meta.ts
+++ b/runner/src/indexer-meta/indexer-meta.ts
@@ -56,7 +56,7 @@ export default class IndexerMeta {
     const spanMessage = `write log for ${entriesArray.length === 1 ? 'single entry' : `batch of ${entriesArray.length}`} through postgres `;
     const writeLogSpan = this.tracer.startSpan(spanMessage);
-
+    // todo: change to try catch rather than callback for code consistency
     await wrapError(async () => {
       const values = entriesArray.map(entry => [
         entry.blockHeight,
diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts
index b3d4f2f4b..6f7a59688 100644
--- a/runner/src/indexer/indexer.test.ts
+++ b/runner/src/indexer/indexer.test.ts
@@ -4,6 +4,7 @@ import type fetch from 'node-fetch';
 import Indexer from './indexer';
 import { VM } from 'vm2';
 import DmlHandler from '../dml-handler/dml-handler';
+// import IndexerMeta from '../indexer-meta/indexer-meta';
 import type PgClient from '../pg-client';
 import { type IndexerBehavior } from '../stream-handler/stream-handler';
 import { LogLevel } from '../indexer-meta/log-entry';
@@ -196,6 +197,12 @@ CREATE TABLE
     delete: jest.fn().mockReturnValue([]),
   } as unknown as DmlHandler;

+  // const genericMockIndexerMeta : any = {
+  //   writeLogs: jest.fn(),
+  //   setStatus: jest.fn(),
+  //   updateBlockheight: jest.fn()
+  // } as unknown as IndexerMeta;
+
   const genericDbCredentials: any = {
     database: 'test_near',
     host: 'postgres',
@@ -234,7 +241,8 @@ CREATE TABLE
     const indexer = new Indexer(defaultIndexerBehavior, {
       fetch: mockFetch as unknown as typeof fetch,
       provisioner: genericProvisioner,
-      dmlHandler: genericMockDmlHandler
+      dmlHandler: genericMockDmlHandler,
+      // indexerMeta: genericMockIndexerMeta ,
     }, undefined, config);

     const functions: Record<string, any> = {};
@@ -288,7 +296,7 @@ CREATE TABLE
       dmlHandler: genericMockDmlHandler
     }, undefined, config);

-    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE);
+    const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE /**
[] */); const query = ` query { @@ -343,7 +351,7 @@ CREATE TABLE dmlHandler: genericMockDmlHandler }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE /** [] */); await context.fetchFromSocialApi('/index', { method: 'POST', @@ -372,7 +380,7 @@ CREATE TABLE }); const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE /** [] */); await expect(async () => await context.graphql('query { hello }')).rejects.toThrow('boom'); }); @@ -387,7 +395,7 @@ CREATE TABLE }); const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE /** [] */); const query = 'query($name: String) { hello(name: $name) }'; const variables = { name: 'morgan' }; @@ -508,7 +516,7 @@ CREATE TABLE );`; // Does not outright throw an error but instead returns an empty object - expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1)) + expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1 /** [] */)) .toStrictEqual({}); }); @@ -519,7 +527,8 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); const objToInsert = [{ account_id: 'morgs_near', @@ -553,7 +562,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); const promises = []; for (let i = 1; i <= 100; i++) { @@ -588,7 +597,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); const objToSelect = { account_id: 'morgs_near', @@ -614,7 +623,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); const whereObj = { account_id: 'morgs_near', @@ -644,7 +653,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + const 
context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); const objToInsert = [{ account_id: 'morgs_near', @@ -676,7 +685,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'/** [] */); const deleteFilter = { account_id: 'morgs_near', @@ -691,7 +700,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'); + const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'/** [] */); expect(Object.keys(context.db)).toStrictEqual([ 'CreatorQuest', @@ -729,7 +738,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres'); + const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres'/** [] */); expect(Object.keys(context.db)).toStrictEqual([]); }); @@ -797,7 +806,7 @@ CREATE TABLE }, shards: {} } as unknown as StreamerMessage) as unknown as Block; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler/** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -876,7 +885,7 @@ CREATE TABLE }, shards: {} } as unknown as StreamerMessage) as unknown as Block; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler/** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -912,7 +921,7 @@ CREATE TABLE fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); const functions = { 'morgs.near/test': { @@ -956,7 +965,7 @@ CREATE TABLE fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: 
genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -992,7 +1001,7 @@ CREATE TABLE fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -1030,7 +1039,7 @@ CREATE TABLE fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -1070,7 +1079,7 @@ CREATE TABLE fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn().mockRejectedValue(error), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -1132,19 +1141,19 @@ CREATE TABLE const indexerDebug = new Indexer( { log_level: LogLevel.DEBUG }, - { fetch: mockFetchDebug as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler }, + { fetch: mockFetchDebug as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config ); const indexerInfo = new Indexer( { log_level: LogLevel.INFO }, - { fetch: mockFetchInfo as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler }, + { fetch: mockFetchInfo as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config ); const indexerError = new Indexer( { log_level: LogLevel.ERROR }, - { fetch: mockFetchError as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler }, + { fetch: mockFetchError as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config ); @@ -1209,7 +1218,7 @@ CREATE TABLE }); const role = 'morgs_near'; const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE/** [] */); const mutation = ` mutation { diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index f17644346..77a1fd3e3 100644 --- a/runner/src/indexer/indexer.ts +++ 
b/runner/src/indexer/indexer.ts @@ -5,11 +5,10 @@ import { Parser } from 'node-sql-parser'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; -// import IndexerMeta from '../indexer-meta/indexer-meta'; +import LogEntry, { LogLevel } from '../indexer-meta/log-entry'; import { type IndexerBehavior } from '../stream-handler/stream-handler'; -import { IndexerStatus } from '../indexer-meta/indexer-meta'; -import { LogLevel } from '../indexer-meta/log-entry'; +import /** IndexerMeta, */ { IndexerStatus } from '../indexer-meta/indexer-meta'; import { type DatabaseConnectionParameters } from '../provisioner/provisioner'; import { trace, type Span } from '@opentelemetry/api'; @@ -17,6 +16,7 @@ interface Dependencies { fetch: typeof fetch provisioner: Provisioner dmlHandler?: DmlHandler + // indexerMeta?: IndexerMeta parser: Parser }; @@ -47,6 +47,8 @@ interface IndexerFunction { interface Config { hasuraAdminSecret: string hasuraEndpoint: string + hasuraHostOverride?: string + hasuraPortOverride?: number } const defaultConfig: Config = { @@ -94,7 +96,7 @@ export default class Indexer { const simultaneousPromises: Array> = []; const allMutations: string[] = []; - // const logEntries: LogEntry[] = []; + const logEntries: LogEntry[] = []; for (const functionName in functions) { try { @@ -109,30 +111,36 @@ export default class Indexer { if (!await this.deps.provisioner.fetchUserApiProvisioningStatus(indexerFunction.account_id, indexerFunction.function_name)) { await this.setStatus(functionName, blockHeight, IndexerStatus.PROVISIONING); simultaneousPromises.push(this.writeLog(LogLevel.INFO, functionName, blockHeight, 'Provisioning endpoint: starting')); - // logEntries.push({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.INFO, message: 'Provisioning endpoint: starting' }); + // const provisionStartLogEntry = LogEntry.systemInfo('Provisioning endpoint: starting', blockHeight); + // logEntries.push(provisionStartLogEntry); await this.deps.provisioner.provisionUserApi(indexerFunction.account_id, indexerFunction.function_name, indexerFunction.schema); simultaneousPromises.push(this.writeLog(LogLevel.INFO, functionName, blockHeight, 'Provisioning endpoint: successful')); - // logEntries.push({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.INFO, message: 'Provisioning endpoint: successful' }); + // const provisionSuccessLogEntry = LogEntry.systemInfo('Provisioning endpoint: starting', blockHeight); + // logEntries.push(provisionSuccessLogEntry); } } catch (e) { const error = e as Error; simultaneousPromises.push(this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Provisioning endpoint: failure', error.message)); - // logEntries.push({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.INFO, message: `Provisioning endpoint: failure ${error.message}` }); + // const provisionFailureLogEntry = LogEntry.systemError('Provisioning endpoint: starting', blockHeight); + // logEntries.push(provisionFailureLogEntry); throw error; } } - // logEntries.push({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.INFO, message: runningMessage }); + const runningLogEntry = LogEntry.systemInfo(runningMessage, blockHeight); + logEntries.push(runningLogEntry); // Cache database credentials after provisioning const credentialsFetchSpan = this.tracer.startSpan('fetch database connection parameters'); try { this.database_connection_parameters ??= 
await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters; - // this.indexer_logger ??= new IndexerLogger(functionName, this.indexer_behavior.log_level, this.database_connection_parameters); + // this.database_connection_parameters = await this.getDatabaseConnectionParams(hasuraRoleName); + // this.deps.indexerMeta ??= new IndexerMeta(functionName, this.indexer_behavior.log_level, this.database_connection_parameters); this.deps.dmlHandler ??= new DmlHandler(this.database_connection_parameters); } catch (e) { const error = e as Error; await this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Failed to get database connection parameters', error.message); - // logEntries.push({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.ERROR, message: `Failed to get database connection parameters ${error.message}` }); + // const databaseErrorLogEntry = LogEntry.systemError('Failed to get database connection parameters', blockHeight); + // logEntries.push(databaseErrorLogEntry); throw error; } finally { credentialsFetchSpan.end(); @@ -142,7 +150,7 @@ export default class Indexer { const resourceCreationSpan = this.tracer.startSpan('prepare vm and context to run indexer code'); simultaneousPromises.push(this.setStatus(functionName, blockHeight, IndexerStatus.RUNNING)); const vm = new VM({ allowAsync: true }); - const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName/* , logEntries */); + const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName /** , logEntries */); vm.freeze(block, 'block'); vm.freeze(lakePrimitives, 'primitives'); @@ -157,7 +165,8 @@ export default class Indexer { } catch (e) { const error = e as Error; simultaneousPromises.push(this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Error running IndexerFunction', error.message)); - // logEntries.push({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.ERROR, message: `Error running IndexerFunction ${error.message}` }); + const indexerErrorLogEntry = LogEntry.systemError('Error running IndexerFunction', blockHeight); + logEntries.push(indexerErrorLogEntry); throw e; } finally { runIndexerCodeSpan.end(); @@ -169,12 +178,23 @@ export default class Indexer { await this.setStatus(functionName, blockHeight, IndexerStatus.FAILING); throw e; } finally { - await Promise.all([...simultaneousPromises]); + await Promise.all([...simultaneousPromises/** , (this.deps.indexerMeta as IndexerMeta).writeLogs(logEntries) */]); } } return allMutations; } + // async getDatabaseConnectionParams(hasuraRoleName: string): Promise { + // const { username, password, database, host, port } = await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters; + // return { + // username, + // password, + // database, + // host: this.config.hasuraHostOverride ?? host, + // port: this.config.hasuraPortOverride ?? 
port + // } + // } + enableAwaitTransform (indexerFunction: string): string { return ` async function f(){ @@ -190,7 +210,7 @@ export default class Indexer { ].reduce((acc, val) => val(acc), indexerFunction); } - buildContext (schema: string, functionName: string, blockHeight: number, hasuraRoleName: string/*, logEntries: LogEntry[] */): Context { + buildContext (schema: string, functionName: string, blockHeight: number, hasuraRoleName: string /** logEntries: LogEntry[] */): Context { const functionNameWithoutAccount = functionName.split('/')[1].replace(/[.-]/g, '_'); const schemaName = functionName.replace(/[^a-zA-Z0-9]/g, '_'); return { @@ -221,24 +241,28 @@ export default class Indexer { }, debug: async (...log) => { return await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, ...log); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.DEBUG, message: log.join(' ') }, logEntries, functionName); + // const debugLogEntry = LogEntry.systemDebug(log.join(' '), blockHeight); + // return await this.writeLog(debugLogEntry, logEntries as LogEntry[], functionName); }, log: async (...log) => { return await this.writeLog(LogLevel.INFO, functionName, blockHeight, ...log); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.INFO, message: log.join(' ') }, logEntries, functionName); + // const infoLogEntry = LogEntry.systemInfo(log.join(' '), blockHeight); + // return await this.writeLog(infoLogEntry, logEntries as LogEntry[], functionName); }, warn: async (...log) => { return await this.writeLog(LogLevel.WARN, functionName, blockHeight, ...log); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.WARN, message: log.join(' ') }, logEntries, functionName); + // const warnLogEntry = LogEntry.systemWarn(log.join(' '), blockHeight); + // return await this.writeLog(warnLogEntry, logEntries as LogEntry[], functionName); }, error: async (...log) => { return await this.writeLog(LogLevel.ERROR, functionName, blockHeight, ...log); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.ERROR, message: log.join(' ') }, logEntries, functionName); + // const errorLogEntry = LogEntry.systemError(log.join(' '), blockHeight); + // return await this.writeLog(errorLogEntry, logEntries as LogEntry[], functionName); }, fetchFromSocialApi: async (path, options) => { return await this.deps.fetch(`https://api.near.social${path}`, options); }, - db: this.buildDatabaseContext(functionName, schemaName, schema, blockHeight/*, logEntries */) + db: this.buildDatabaseContext(functionName, schemaName, schema, blockHeight /** , logEntries as LogEntry[] */) }; } @@ -348,7 +372,8 @@ export default class Indexer { // Write log before calling insert await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, `Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}`); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.DEBUG, message: `Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}` }, logEntries, functionName); + // const insertLogEntry = LogEntry.systemDebug(`Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}`, blockHeight); + // await this.writeLog(insertLogEntry, logEntries, functionName); // Call insert with parameters return await dmlHandler.insert(schemaName, 
tableDefinitionNames, Array.isArray(objectsToInsert) ? objectsToInsert : [objectsToInsert]); } finally { @@ -362,7 +387,8 @@ export default class Indexer { // Write log before calling select await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, `Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit`); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.DEBUG, message: `Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit` }, logEntries, functionName); + // const selectLogEntry = LogEntry.systemDebug(`Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit`, blockHeight); + // await this.writeLog(selectLogEntry, logEntries, functionName); // Call select with parameters return await dmlHandler.select(schemaName, tableDefinitionNames, filterObj, limit); } finally { @@ -376,7 +402,8 @@ export default class Indexer { // Write log before calling update await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, `Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}`); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.DEBUG, message: `Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}` }, logEntries, functionName); + // const updateLogEntry = LogEntry.systemDebug(`Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}`, blockHeight); + // await this.writeLog(updateLogEntry, logEntries, functionName); // Call update with parameters return await dmlHandler.update(schemaName, tableDefinitionNames, filterObj, updateObj); } finally { @@ -390,7 +417,8 @@ export default class Indexer { // Write log before calling upsert await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, `Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}`); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.DEBUG, message: `Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}` }, logEntries, functionName); + // const upsertLogEntry = LogEntry.systemDebug(`Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}`, blockHeight); + // await this.writeLog(upsertLogEntry, logEntries, functionName); // Call upsert with parameters return await dmlHandler.upsert(schemaName, tableDefinitionNames, Array.isArray(objectsToInsert) ? 
objectsToInsert : [objectsToInsert], conflictColumns, updateColumns); } finally { @@ -404,7 +432,8 @@ export default class Indexer { // Write log before calling delete await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, `Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}`); - // await this.writeLog({ blockHeight, logTimestamp: new Date(), logType: LogType.SYSTEM, logLevel: LogLevel.DEBUG, message: `Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}` }, logEntries, functionName); + // const deleteLogEntry = LogEntry.systemDebug(`Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}`, blockHeight); + // await this.writeLog(deleteLogEntry, logEntries, functionName); // Call delete with parameters return await dmlHandler.delete(schemaName, tableDefinitionNames, filterObj); } finally { @@ -460,13 +489,13 @@ export default class Indexer { // async writeLog (logEntry: LogEntry, logEntries: LogEntry[], functionName: string): Promise { // logEntries.push(logEntry); - + // console.log(functionName); // const { logLevel, blockHeight, message } = logEntry; // return await this.writeLogOld(logLevel, functionName, blockHeight, message); // } // async callWriteLog (logEntry: LogEntry): Promise { - // await (this.indexer_logger as IndexerLogger).writeLogs(logEntry); + // await (this.deps.indexerMeta as IndexerMeta).writeLogs([logEntry]); // } async updateIndexerBlockHeight (functionName: string, blockHeight: number, isHistorical: boolean): Promise { @@ -511,6 +540,7 @@ export default class Indexer { } } + // todo rename to writeLogOld async writeLog (logLevel: LogLevel, functionName: string, blockHeight: number, ...message: any[]): Promise { if (logLevel < this.indexer_behavior.log_level) { return; diff --git a/runner/src/test-client.ts b/runner/src/test-client.ts deleted file mode 100644 index caeee161d..000000000 --- a/runner/src/test-client.ts +++ /dev/null @@ -1,89 +0,0 @@ -// Run with 'npx ts-node src/test-client.ts' // located at -import runnerClient from './server/runner-client'; -const schema = ` -CREATE TABLE -versions ( - "id" SERIAL PRIMARY KEY, - "block_height" BIGINT NOT NULL, - "block_timestamp_ms" BIGINT NOT NULL, - "code" VARCHAR NOT NULL, - "component_author_id" VARCHAR NOT NULL, - "component_name" VARCHAR NOT NULL, - "lines_added" INT NOT NULL, - "lines_removed" INT NOT NULL, - "receipt_id" VARCHAR NOT NULL -); -CREATE TABLE -metadata ( - "component_id" VARCHAR PRIMARY KEY, - "block_height" BIGINT NOT NULL, - "block_timestamp_ms" BIGINT NOT NULL, - "code" VARCHAR NOT NULL, - "component_author_id" VARCHAR NOT NULL, - "component_name" VARCHAR NOT NULL, - "star_count" INT NOT NULL, - "fork_count" INT NOT NULL, - "name" VARCHAR, - "image_ipfs_cid" VARCHAR, - "description" VARCHAR, - "fork_of_source" VARCHAR, - "fork_of_block_height" BIGINT, - "tags" VARCHAR, - "website" VARCHAR -); -`; -const code = ` -const h = block.header().height; -const blockTimestampMs = Math.floor( - Number(block.header().timestampNanosec) / 1e6 -); -const code = 'console.log("hello world")'; -const componentAuthorId = 'kevin0.near'; -const componentName = 'test_component_1'; -const linesAdded = 1; -const linesRemoved = 1; -receiptId = '3WGZ91JVF2kxF54SryuktCCmH2kgijuGM9P3uoqSGs5s' -await console.debug('debug log'); -await console.log('info log'); -await console.log('Hello this is some indexer log'); -await console.error('error log'); -// await context.db.Metadata.insert( -// {block_height: h, block_timestamp_ms: 
blockTimestampMs, code, component_author_id: componentAuthorId, component_name: componentName, star_count: 0, fork_count: 0, name: 'test', image_ipfs_cid: 'test', description: 'test', fork_of_source: 'test', fork_of_block_height: 0, tags: 'test', website: 'test'} -// ); -`; -const indexer = { - account_id: 'kevin10.near', - redis_stream: 'test:block_stream', - function_name: 'component_01', - code, - start_block_height: 113448278, - schema, - provisioned: true, - indexer_rule: { - indexer_rule_kind: 'Action', - matching_rule: { - rule: 'ACTION_ANY', - affected_account_id: 'social.near', - status: 'SUCCESS' - }, - id: null, - name: null - } -}; -void (async function main () { - // console.log(indexer.redis_stream, indexer.account_id, indexer.function_name, indexer.code, indexer.schema) - runnerClient.StartExecutor({ - redisStream: indexer.redis_stream, - accountId: indexer.account_id, - functionName: indexer.function_name, - code: indexer.code, - schema: indexer.schema - }, (err, response) => { - if (err) { - } else { - console.log('start: ', response); - console.log('running...'); - } - }); - console.log('done'); -})(); diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts index f4f34168b..c27fe28a4 100644 --- a/runner/tests/integration.test.ts +++ b/runner/tests/integration.test.ts @@ -6,6 +6,7 @@ import Indexer from '../src/indexer'; import HasuraClient from '../src/hasura-client'; import Provisioner from '../src/provisioner'; import PgClient from '../src/pg-client'; +// import IndexerMeta from '../src/indexer-meta/indexer-meta'; import { HasuraGraphQLContainer, type StartedHasuraGraphQLContainer } from './testcontainers/hasura'; import { PostgreSqlContainer, type StartedPostgreSqlContainer } from './testcontainers/postgres'; @@ -70,18 +71,30 @@ describe('Indexer integration', () => { hasuraPortOverride: Number(postgresContainer.getPort()), } ); + // const userDB = await provisioner.getDatabaseConnectionParameters('morgs_near'); + + // const indexerMeta = new IndexerMeta('morgs_near', LogLevel.INFO, { + // host: postgresContainer.getIpAddress(), + // port: Number(postgresContainer.getPort()), + // database: userDB.database, + // username: userDB.username, + // password: userDB.password + // }, pgClient); const indexer = new Indexer( { log_level: LogLevel.INFO, }, { + // indexerMeta, provisioner }, undefined, { hasuraAdminSecret: hasuraContainer.getAdminSecret(), hasuraEndpoint: hasuraContainer.getEndpoint(), + hasuraHostOverride: postgresContainer.getIpAddress(), + hasuraPortOverride: Number(postgresContainer.getPort()) } ); From 03ded3fad162f652ad74de401bf0c7dbe770e55e Mon Sep 17 00:00:00 2001 From: Kevin Zhang <42101107+Kevin101Zhang@users.noreply.github.com> Date: Wed, 10 Apr 2024 14:19:00 -0400 Subject: [PATCH 20/39] chore: remove console.log --- runner/src/indexer/indexer.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index d418a6c40..d8b793c0d 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -492,7 +492,6 @@ export default class Indexer { // async writeLog (logEntry: LogEntry, logEntries: LogEntry[], functionName: string): Promise { // logEntries.push(logEntry); - // console.log(functionName); // const { logLevel, blockHeight, message } = logEntry; // return await this.writeLogOld(logLevel, functionName, blockHeight, message); // } From 8d48fc5fc0d0c6357948b0ece2a07e0307a70037 Mon Sep 17 00:00:00 2001 From: Kevin Zhang 
<42101107+Kevin101Zhang@users.noreply.github.com> Date: Wed, 10 Apr 2024 14:23:48 -0400 Subject: [PATCH 21/39] chore: added back commented-out test for metadataTableDDL --- runner/src/provisioner/provisioner.test.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/runner/src/provisioner/provisioner.test.ts b/runner/src/provisioner/provisioner.test.ts index 927925d52..2bb13ca61 100644 --- a/runner/src/provisioner/provisioner.test.ts +++ b/runner/src/provisioner/provisioner.test.ts @@ -114,6 +114,7 @@ describe('Provisioner', () => { ]); expect(hasuraClient.addDatasource).toBeCalledWith(sanitizedAccountId, password, sanitizedAccountId); expect(hasuraClient.createSchema).toBeCalledWith(sanitizedAccountId, schemaName); + // expect(hasuraClient.executeSqlOnSchema).toBeCalledWith(sanitizedAccountId, schemaName, metadataTableDDL()); expect(hasuraClient.executeSqlOnSchema).toHaveBeenNthCalledWith(1, sanitizedAccountId, schemaName, databaseSchema); expect(hasuraClient.executeSqlOnSchema).toHaveBeenNthCalledWith(2, sanitizedAccountId, schemaName, logsDDL); expect(hasuraClient.getTableNames).toBeCalledWith(schemaName, sanitizedAccountId); From 80eb9f1f19987a4bd7fc7e911b5aa9d27acf7f9d Mon Sep 17 00:00:00 2001 From: kevin Date: Wed, 10 Apr 2024 14:32:26 -0400 Subject: [PATCH 22/39] test: re-enabled skipped provisioner tests --- runner/src/provisioner/provisioner.test.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/runner/src/provisioner/provisioner.test.ts b/runner/src/provisioner/provisioner.test.ts index 927925d52..9409cc485 100644 --- a/runner/src/provisioner/provisioner.test.ts +++ b/runner/src/provisioner/provisioner.test.ts @@ -215,8 +215,7 @@ describe('Provisioner', () => { await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to add permissions to tables: some error'); }); - // TODO re-enable once logs table is created - it.skip('throws an error when it fails to create logs table', async () => { + it('throws an error when it fails to create logs table', async () => { hasuraClient.executeSqlOnSchema = jest.fn().mockRejectedValue(error); await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to run logs script: some error'); @@ -235,7 +234,7 @@ describe('Provisioner', () => { await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to run user script: some error'); }); - it.skip('throws when grant cron access fails', async () => { + it('throws when grant cron access fails', async () => { cronPgClient.query = jest.fn().mockRejectedValue(error); await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to setup partitioned logs table: Failed to grant cron access: some error'); From e097cbc257f4683ae8a2c6d46e94d45b215488dd Mon Sep 17 00:00:00 2001 From: kevin Date: Wed, 10 Apr 2024 14:36:28 -0400 Subject: [PATCH 23/39] test: removed redundant provisioner test cases --- runner/src/provisioner/provisioner.test.ts | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/runner/src/provisioner/provisioner.test.ts b/runner/src/provisioner/provisioner.test.ts index e452d6965..a6502c1ff 100644 --- a/runner/src/provisioner/provisioner.test.ts +++ b/runner/src/provisioner/provisioner.test.ts @@ -216,25 +216,12 @@ describe('Provisioner', () => { await 
expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to add permissions to tables: some error'); }); - it('throws an error when it fails to create logs table', async () => { - hasuraClient.executeSqlOnSchema = jest.fn().mockRejectedValue(error); - - await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to run logs script: some error'); - }); - it.skip('throws an error when it fails to create metadata table', async () => { hasuraClient.executeSqlOnSchema = jest.fn().mockResolvedValueOnce(null).mockRejectedValue(error); await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to create metadata table in morgs_near.morgs_near_test_function: some error'); }); - it('throws an error when it fails to run sql', async () => { - // hasuraClient.executeSqlOnSchema = jest.fn().mockResolvedValueOnce(null).mockResolvedValueOnce(null).mockRejectedValue(error); - hasuraClient.executeSqlOnSchema = jest.fn().mockRejectedValue(error); - - await expect(provisioner.provisionUserApi(accountId, functionName, databaseSchema)).rejects.toThrow('Failed to provision endpoint: Failed to run user script: some error'); - }); - it('throws when grant cron access fails', async () => { cronPgClient.query = jest.fn().mockRejectedValue(error); From c9fc5155f0828553636ec0e48035549e97e3e73f Mon Sep 17 00:00:00 2001 From: kevin Date: Wed, 10 Apr 2024 14:39:49 -0400 Subject: [PATCH 24/39] chore: removed comment --- runner/src/indexer-meta/indexer-meta.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/runner/src/indexer-meta/indexer-meta.ts b/runner/src/indexer-meta/indexer-meta.ts index 1da1343ac..493c8b450 100644 --- a/runner/src/indexer-meta/indexer-meta.ts +++ b/runner/src/indexer-meta/indexer-meta.ts @@ -56,7 +56,7 @@ export default class IndexerMeta { const spanMessage = `write log for ${entriesArray.length === 1 ? 'single entry' : `batch of ${entriesArray.length}`} through postgres `; const writeLogSpan = this.tracer.startSpan(spanMessage); - // todo: change to try catch rather than callback for code consistency + await wrapError(async () => { const values = entriesArray.map(entry => [ entry.blockHeight, From c8620424060355b11f38f25b45c9e3ccafa6ec4a Mon Sep 17 00:00:00 2001 From: kevin Date: Wed, 10 Apr 2024 14:40:27 -0400 Subject: [PATCH 25/39] chore: removed comment --- runner/src/indexer-meta/indexer-meta.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/runner/src/indexer-meta/indexer-meta.ts b/runner/src/indexer-meta/indexer-meta.ts index 493c8b450..fc527ff6d 100644 --- a/runner/src/indexer-meta/indexer-meta.ts +++ b/runner/src/indexer-meta/indexer-meta.ts @@ -56,7 +56,6 @@ export default class IndexerMeta { const spanMessage = `write log for ${entriesArray.length === 1 ? 
'single entry' : `batch of ${entriesArray.length}`} through postgres `; const writeLogSpan = this.tracer.startSpan(spanMessage); - await wrapError(async () => { const values = entriesArray.map(entry => [ entry.blockHeight, From ef4d29c7dba251c3f3a83198685fc9fc6afa85fd Mon Sep 17 00:00:00 2001 From: kevin Date: Wed, 10 Apr 2024 15:00:13 -0400 Subject: [PATCH 26/39] test: mocked provisionLogsIfNeeded for tests --- runner/src/indexer/indexer.test.ts | 4 ++++ runner/src/indexer/indexer.ts | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts index 6f7a59688..df24a1b18 100644 --- a/runner/src/indexer/indexer.test.ts +++ b/runner/src/indexer/indexer.test.ts @@ -920,6 +920,7 @@ CREATE TABLE getDatabaseConnectionParameters: jest.fn().mockReturnValue(genericDbCredentials), fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn(), + provisionLogsIfNeeded: jest.fn(), }; const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); @@ -964,6 +965,7 @@ CREATE TABLE getDatabaseConnectionParameters: jest.fn().mockReturnValue(genericDbCredentials), fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), + provisionLogsIfNeeded: jest.fn(), }; const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); @@ -1000,6 +1002,7 @@ CREATE TABLE getDatabaseConnectionParameters: jest.fn().mockReturnValue(genericDbCredentials), fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), + provisionLogsIfNeeded: jest.fn(), }; const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); @@ -1038,6 +1041,7 @@ CREATE TABLE getDatabaseConnectionParameters: jest.fn().mockReturnValue(genericDbCredentials), fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(true), provisionUserApi: jest.fn(), + provisionLogsIfNeeded: jest.fn(), }; const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index d8b793c0d..307d569a3 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -120,7 +120,7 @@ export default class Indexer { } // TODO enable when new logs implementation is ready - // await this.deps.provisioner.provisionLogsIfNeeded(indexerFunction.account_id, indexerFunction.function_name); + await this.deps.provisioner.provisionLogsIfNeeded(indexerFunction.account_id, indexerFunction.function_name); } catch (e) { const error = e as Error; simultaneousPromises.push(this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Provisioning endpoint: failure', error.message)); From d75bc77d687bd9314231fe69b7b620fecc256fc6 Mon Sep 17 00:00:00 2001 From: kevin Date: Wed, 10 Apr 2024 16:10:23 -0400 Subject: [PATCH 27/39] feat: re-enabled logging to new log table --- runner/src/indexer/indexer.test.ts | 64 ++++----- runner/src/indexer/indexer.ts 
| 141 +++++++++----------- runner/src/stream-handler/stream-handler.ts | 14 +- runner/tests/integration.test.ts | 11 -- 4 files changed, 101 insertions(+), 129 deletions(-) diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts index df24a1b18..374d5799f 100644 --- a/runner/src/indexer/indexer.test.ts +++ b/runner/src/indexer/indexer.test.ts @@ -4,7 +4,7 @@ import type fetch from 'node-fetch'; import Indexer from './indexer'; import { VM } from 'vm2'; import DmlHandler from '../dml-handler/dml-handler'; -// import IndexerMeta from '../indexer-meta/indexer-meta'; +import type IndexerMeta from '../indexer-meta/indexer-meta'; import type PgClient from '../pg-client'; import { type IndexerBehavior } from '../stream-handler/stream-handler'; import { LogLevel } from '../indexer-meta/log-entry'; @@ -197,11 +197,11 @@ CREATE TABLE delete: jest.fn().mockReturnValue([]), } as unknown as DmlHandler; - // const genericMockIndexerMeta : any = { - // writeLogs: jest.fn(), - // setStatus: jest.fn(), - // updateBlockheight: jest.fn() - // } as unknown as IndexerMeta; + const genericMockIndexerMeta: any = { + writeLogs: jest.fn(), + setStatus: jest.fn(), + updateBlockheight: jest.fn() + } as unknown as IndexerMeta; const genericDbCredentials: any = { database: 'test_near', @@ -242,7 +242,7 @@ CREATE TABLE fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler, - // indexerMeta: genericMockIndexerMeta , + indexerMeta: genericMockIndexerMeta, }, undefined, config); const functions: Record = {}; @@ -296,7 +296,7 @@ CREATE TABLE dmlHandler: genericMockDmlHandler }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE /** [] */); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE, []); const query = ` query { @@ -351,7 +351,7 @@ CREATE TABLE dmlHandler: genericMockDmlHandler }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE /** [] */); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE, []); await context.fetchFromSocialApi('/index', { method: 'POST', @@ -380,7 +380,7 @@ CREATE TABLE }); const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE /** [] */); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, INVALID_HASURA_ROLE, []); await expect(async () => await context.graphql('query { hello }')).rejects.toThrow('boom'); }); @@ -395,7 +395,7 @@ CREATE TABLE }); const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE /** [] */); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE, []); const query = 'query($name: String) { hello(name: $name) }'; const variables = { name: 'morgan' }; @@ -516,7 +516,7 @@ CREATE TABLE );`; // Does not outright throw an error but instead returns an empty object - expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1 /** [] */)) + expect(indexer.buildDatabaseContext('test_account', 'test_schema_name', schemaWithDuplicateSanitizedTableNames, 1, [])) 
.toStrictEqual({}); }); @@ -528,7 +528,7 @@ CREATE TABLE dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres', []); const objToInsert = [{ account_id: 'morgs_near', @@ -562,7 +562,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres', []); const promises = []; for (let i = 1; i <= 100; i++) { @@ -597,7 +597,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres', []); const objToSelect = { account_id: 'morgs_near', @@ -623,7 +623,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres', []); const whereObj = { account_id: 'morgs_near', @@ -653,7 +653,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres' /** [] */); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres', []); const objToInsert = [{ account_id: 'morgs_near', @@ -685,7 +685,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: mockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'/** [] */); + const context = indexer.buildContext(SOCIAL_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres', []); const deleteFilter = { account_id: 'morgs_near', @@ -700,7 +700,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres'/** [] */); + const context = indexer.buildContext(STRESS_TEST_SCHEMA, 'morgs.near/social_feed1', 1, 'postgres', []); expect(Object.keys(context.db)).toStrictEqual([ 'CreatorQuest', @@ -738,7 +738,7 @@ CREATE TABLE fetch: genericMockFetch as unknown as typeof fetch, dmlHandler: genericMockDmlHandler }, genericDbCredentials, config); - const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres'/** [] */); + const context = indexer.buildContext('', 'morgs.near/social_feed1', 1, 'postgres', []); expect(Object.keys(context.db)).toStrictEqual([]); }); @@ -806,7 +806,7 @@ CREATE TABLE }, shards: {} } as unknown as StreamerMessage) as unknown as Block; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler/** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); + 
const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -885,7 +885,7 @@ CREATE TABLE }, shards: {} } as unknown as StreamerMessage) as unknown as Block; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler/** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config); const functions: Record = {}; functions['buildnear.testnet/test'] = { @@ -922,7 +922,7 @@ CREATE TABLE provisionUserApi: jest.fn(), provisionLogsIfNeeded: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config); const functions = { 'morgs.near/test': { @@ -967,7 +967,7 @@ CREATE TABLE provisionUserApi: jest.fn(), provisionLogsIfNeeded: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -1004,7 +1004,7 @@ CREATE TABLE provisionUserApi: jest.fn(), provisionLogsIfNeeded: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -1017,7 +1017,7 @@ CREATE TABLE await indexer.runFunctions(mockBlock, functions, false, { provision: true }); expect(provisioner.provisionUserApi).not.toHaveBeenCalled(); - expect(provisioner.getDatabaseConnectionParameters).toHaveBeenCalledTimes(1); + expect(provisioner.getDatabaseConnectionParameters).toHaveBeenCalledTimes(3); // todo: temporary fix for the test to ensure new logging works }); test('Indexer.runFunctions() supplies the required role to the GraphQL endpoint', async () => { @@ -1043,7 +1043,7 @@ CREATE TABLE provisionUserApi: jest.fn(), provisionLogsIfNeeded: jest.fn(), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof 
fetch, provisioner, dmlHandler: genericMockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -1083,7 +1083,7 @@ CREATE TABLE fetchUserApiProvisioningStatus: jest.fn().mockReturnValue(false), provisionUserApi: jest.fn().mockRejectedValue(error), }; - const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, undefined, config); + const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch, provisioner, dmlHandler: genericMockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config); const functions: Record = { 'morgs.near/test': { @@ -1145,19 +1145,19 @@ CREATE TABLE const indexerDebug = new Indexer( { log_level: LogLevel.DEBUG }, - { fetch: mockFetchDebug as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, + { fetch: mockFetchDebug as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config ); const indexerInfo = new Indexer( { log_level: LogLevel.INFO }, - { fetch: mockFetchInfo as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, + { fetch: mockFetchInfo as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config ); const indexerError = new Indexer( { log_level: LogLevel.ERROR }, - { fetch: mockFetchError as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler /** , indexerMeta: genericMockIndexerMeta */ }, + { fetch: mockFetchError as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: mockDmlHandler, indexerMeta: genericMockIndexerMeta }, undefined, config ); @@ -1222,7 +1222,7 @@ CREATE TABLE }); const role = 'morgs_near'; const indexer = new Indexer(defaultIndexerBehavior, { fetch: mockFetch as unknown as typeof fetch }, undefined, config); - const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE/** [] */); + const context = indexer.buildContext(SIMPLE_SCHEMA, INDEXER_NAME, 1, HASURA_ROLE, []); const mutation = ` mutation { diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 307d569a3..54ca0e9cc 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -8,7 +8,7 @@ import DmlHandler from '../dml-handler/dml-handler'; import LogEntry, { LogLevel } from '../indexer-meta/log-entry'; import { type IndexerBehavior } from '../stream-handler/stream-handler'; -import /** IndexerMeta, */ { IndexerStatus } from '../indexer-meta/indexer-meta'; +import IndexerMeta, { IndexerStatus } from '../indexer-meta/indexer-meta'; import { type DatabaseConnectionParameters } from '../provisioner/provisioner'; import { trace, type Span } from '@opentelemetry/api'; @@ -16,7 +16,7 @@ interface Dependencies { fetch: typeof fetch provisioner: Provisioner dmlHandler?: DmlHandler - // indexerMeta?: IndexerMeta + indexerMeta?: IndexerMeta parser: Parser }; @@ -103,29 +103,28 @@ export default class Indexer { const indexerFunction = functions[functionName]; const runningMessage = `Running function ${functionName} on block ${blockHeight}, lag is: ${lag?.toString()}ms from block timestamp`; - 
simultaneousPromises.push(this.writeLog(LogLevel.INFO, functionName, blockHeight, runningMessage)); + simultaneousPromises.push(this.writeLogOld(LogLevel.INFO, functionName, blockHeight, runningMessage)); const hasuraRoleName = functionName.split('/')[0].replace(/[.-]/g, '_'); if (options.provision && !indexerFunction.provisioned) { try { if (!await this.deps.provisioner.fetchUserApiProvisioningStatus(indexerFunction.account_id, indexerFunction.function_name)) { await this.setStatus(functionName, blockHeight, IndexerStatus.PROVISIONING); - simultaneousPromises.push(this.writeLog(LogLevel.INFO, functionName, blockHeight, 'Provisioning endpoint: starting')); - // const provisionStartLogEntry = LogEntry.systemInfo('Provisioning endpoint: starting', blockHeight); - // logEntries.push(provisionStartLogEntry); + simultaneousPromises.push(this.writeLogOld(LogLevel.INFO, functionName, blockHeight, 'Provisioning endpoint: starting')); + const provisionStartLogEntry = LogEntry.systemInfo('Provisioning endpoint: starting', blockHeight); + logEntries.push(provisionStartLogEntry); await this.deps.provisioner.provisionUserApi(indexerFunction.account_id, indexerFunction.function_name, indexerFunction.schema); - simultaneousPromises.push(this.writeLog(LogLevel.INFO, functionName, blockHeight, 'Provisioning endpoint: successful')); - // const provisionSuccessLogEntry = LogEntry.systemInfo('Provisioning endpoint: starting', blockHeight); - // logEntries.push(provisionSuccessLogEntry); + simultaneousPromises.push(this.writeLogOld(LogLevel.INFO, functionName, blockHeight, 'Provisioning endpoint: successful')); + const provisionSuccessLogEntry = LogEntry.systemInfo('Provisioning endpoint: successful', blockHeight); + logEntries.push(provisionSuccessLogEntry); } - // TODO enable when new logs implementation is ready await this.deps.provisioner.provisionLogsIfNeeded(indexerFunction.account_id, indexerFunction.function_name); } catch (e) { const error = e as Error; - simultaneousPromises.push(this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Provisioning endpoint: failure', error.message)); - // const provisionFailureLogEntry = LogEntry.systemError('Provisioning endpoint: starting', blockHeight); - // logEntries.push(provisionFailureLogEntry); + simultaneousPromises.push(this.writeLogOld(LogLevel.ERROR, functionName, blockHeight, 'Provisioning endpoint: failure', error.message)); + const provisionFailureLogEntry = LogEntry.systemError('Provisioning endpoint: failure', blockHeight); + logEntries.push(provisionFailureLogEntry); throw error; } } @@ -135,15 +134,15 @@ export default class Indexer { // Cache database credentials after provisioning const credentialsFetchSpan = this.tracer.startSpan('fetch database connection parameters'); try { - this.database_connection_parameters ??= await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters; - // this.database_connection_parameters = await this.getDatabaseConnectionParams(hasuraRoleName); - // this.deps.indexerMeta ??= new IndexerMeta(functionName, this.indexer_behavior.log_level, this.database_connection_parameters); + // this.database_connection_parameters ??= await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters; + this.database_connection_parameters = await this.getDatabaseConnectionParams(hasuraRoleName); + this.deps.indexerMeta ??= new IndexerMeta(functionName, this.indexer_behavior.log_level, this.database_connection_parameters); this.deps.dmlHandler 
??= new DmlHandler(this.database_connection_parameters); } catch (e) { const error = e as Error; - await this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Failed to get database connection parameters', error.message); - // const databaseErrorLogEntry = LogEntry.systemError('Failed to get database connection parameters', blockHeight); - // logEntries.push(databaseErrorLogEntry); + await this.writeLogOld(LogLevel.ERROR, functionName, blockHeight, 'Failed to get database connection parameters', error.message); + const databaseErrorLogEntry = LogEntry.systemError('Failed to get database connection parameters', blockHeight); + logEntries.push(databaseErrorLogEntry); throw error; } finally { credentialsFetchSpan.end(); @@ -153,7 +152,7 @@ export default class Indexer { const resourceCreationSpan = this.tracer.startSpan('prepare vm and context to run indexer code'); simultaneousPromises.push(this.setStatus(functionName, blockHeight, IndexerStatus.RUNNING)); const vm = new VM({ allowAsync: true }); - const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName /** , logEntries */); + const context = this.buildContext(indexerFunction.schema, functionName, blockHeight, hasuraRoleName, logEntries); vm.freeze(block, 'block'); vm.freeze(lakePrimitives, 'primitives'); @@ -167,7 +166,7 @@ export default class Indexer { await vm.run(modifiedFunction); } catch (e) { const error = e as Error; - simultaneousPromises.push(this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Error running IndexerFunction', error.message)); + simultaneousPromises.push(this.writeLogOld(LogLevel.ERROR, functionName, blockHeight, 'Error running IndexerFunction', error.message)); const indexerErrorLogEntry = LogEntry.systemError('Error running IndexerFunction', blockHeight); logEntries.push(indexerErrorLogEntry); throw e; @@ -181,22 +180,22 @@ export default class Indexer { await this.setStatus(functionName, blockHeight, IndexerStatus.FAILING); throw e; } finally { - await Promise.all([...simultaneousPromises/** , (this.deps.indexerMeta as IndexerMeta).writeLogs(logEntries) */]); + await Promise.all([...simultaneousPromises, (this.deps.indexerMeta as IndexerMeta).writeLogs(logEntries)]); } } return allMutations; } - // async getDatabaseConnectionParams(hasuraRoleName: string): Promise { - // const { username, password, database, host, port } = await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters; - // return { - // username, - // password, - // database, - // host: this.config.hasuraHostOverride ?? host, - // port: this.config.hasuraPortOverride ?? port - // } - // } + async getDatabaseConnectionParams (hasuraRoleName: string): Promise { + const { username, password, database, host, port } = await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters; + return { + username, + password, + database, + host: this.config.hasuraHostOverride ?? host, + port: this.config.hasuraPortOverride ?? 
port + }; + } enableAwaitTransform (indexerFunction: string): string { return ` @@ -213,7 +212,7 @@ export default class Indexer { ].reduce((acc, val) => val(acc), indexerFunction); } - buildContext (schema: string, functionName: string, blockHeight: number, hasuraRoleName: string /** logEntries: LogEntry[] */): Context { + buildContext (schema: string, functionName: string, blockHeight: number, hasuraRoleName: string, logEntries: LogEntry[]): Context { const functionNameWithoutAccount = functionName.split('/')[1].replace(/[.-]/g, '_'); const schemaName = functionName.replace(/[^a-zA-Z0-9]/g, '_'); return { @@ -243,29 +242,25 @@ export default class Indexer { } }, debug: async (...log) => { - return await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, ...log); - // const debugLogEntry = LogEntry.systemDebug(log.join(' '), blockHeight); - // return await this.writeLog(debugLogEntry, logEntries as LogEntry[], functionName); + const debugLogEntry = LogEntry.systemDebug(log.join(' '), blockHeight); + return await this.writeLog(debugLogEntry, logEntries, functionName); }, log: async (...log) => { - return await this.writeLog(LogLevel.INFO, functionName, blockHeight, ...log); - // const infoLogEntry = LogEntry.systemInfo(log.join(' '), blockHeight); - // return await this.writeLog(infoLogEntry, logEntries as LogEntry[], functionName); + const infoLogEntry = LogEntry.systemInfo(log.join(' '), blockHeight); + return await this.writeLog(infoLogEntry, logEntries, functionName); }, warn: async (...log) => { - return await this.writeLog(LogLevel.WARN, functionName, blockHeight, ...log); - // const warnLogEntry = LogEntry.systemWarn(log.join(' '), blockHeight); - // return await this.writeLog(warnLogEntry, logEntries as LogEntry[], functionName); + const warnLogEntry = LogEntry.systemWarn(log.join(' '), blockHeight); + return await this.writeLog(warnLogEntry, logEntries, functionName); }, error: async (...log) => { - return await this.writeLog(LogLevel.ERROR, functionName, blockHeight, ...log); - // const errorLogEntry = LogEntry.systemError(log.join(' '), blockHeight); - // return await this.writeLog(errorLogEntry, logEntries as LogEntry[], functionName); + const errorLogEntry = LogEntry.systemError(log.join(' '), blockHeight); + return await this.writeLog(errorLogEntry, logEntries, functionName); }, fetchFromSocialApi: async (path, options) => { return await this.deps.fetch(`https://api.near.social${path}`, options); }, - db: this.buildDatabaseContext(functionName, schemaName, schema, blockHeight /** , logEntries as LogEntry[] */) + db: this.buildDatabaseContext(functionName, schemaName, schema, blockHeight, logEntries) }; } @@ -347,7 +342,7 @@ export default class Indexer { schemaName: string, schema: string, blockHeight: number, - // logEntries: LogEntry[], + logEntries: LogEntry[], ): Record any>> { try { const tableNameToDefinitionNamesMapping = this.getTableNameToDefinitionNamesMapping(schema); @@ -373,10 +368,8 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db insert', async (insertSpan: Span) => { try { // Write log before calling insert - await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, - `Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}`); - // const insertLogEntry = LogEntry.systemDebug(`Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}`, blockHeight); - // await this.writeLog(insertLogEntry, logEntries, functionName); + const insertLogEntry = 
LogEntry.systemDebug(`Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}`, blockHeight); + await this.writeLog(insertLogEntry, logEntries, functionName); // Call insert with parameters return await dmlHandler.insert(schemaName, tableDefinitionNames, Array.isArray(objectsToInsert) ? objectsToInsert : [objectsToInsert]); } finally { @@ -388,10 +381,8 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db select', async (selectSpan: Span) => { try { // Write log before calling select - await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, - `Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit`); - // const selectLogEntry = LogEntry.systemDebug(`Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit`, blockHeight); - // await this.writeLog(selectLogEntry, logEntries, functionName); + const selectLogEntry = LogEntry.systemDebug(`Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit`, blockHeight); + await this.writeLog(selectLogEntry, logEntries, functionName); // Call select with parameters return await dmlHandler.select(schemaName, tableDefinitionNames, filterObj, limit); } finally { @@ -403,10 +394,8 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db update', async (updateSpan: Span) => { try { // Write log before calling update - await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, - `Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}`); - // const updateLogEntry = LogEntry.systemDebug(`Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}`, blockHeight); - // await this.writeLog(updateLogEntry, logEntries, functionName); + const updateLogEntry = LogEntry.systemDebug(`Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}`, blockHeight); + await this.writeLog(updateLogEntry, logEntries, functionName); // Call update with parameters return await dmlHandler.update(schemaName, tableDefinitionNames, filterObj, updateObj); } finally { @@ -418,10 +407,8 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db upsert', async (upsertSpan: Span) => { try { // Write log before calling upsert - await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, - `Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}`); - // const upsertLogEntry = LogEntry.systemDebug(`Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}`, blockHeight); - // await this.writeLog(upsertLogEntry, logEntries, functionName); + const upsertLogEntry = LogEntry.systemDebug(`Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. 
Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}`, blockHeight); + await this.writeLog(upsertLogEntry, logEntries, functionName); // Call upsert with parameters return await dmlHandler.upsert(schemaName, tableDefinitionNames, Array.isArray(objectsToInsert) ? objectsToInsert : [objectsToInsert], conflictColumns, updateColumns); } finally { @@ -433,10 +420,8 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db delete', async (deleteSpan: Span) => { try { // Write log before calling delete - await this.writeLog(LogLevel.DEBUG, functionName, blockHeight, - `Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}`); - // const deleteLogEntry = LogEntry.systemDebug(`Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}`, blockHeight); - // await this.writeLog(deleteLogEntry, logEntries, functionName); + const deleteLogEntry = LogEntry.systemDebug(`Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}`, blockHeight); + await this.writeLog(deleteLogEntry, logEntries, functionName); // Call delete with parameters return await dmlHandler.delete(schemaName, tableDefinitionNames, filterObj); } finally { @@ -490,15 +475,18 @@ export default class Indexer { } } - // async writeLog (logEntry: LogEntry, logEntries: LogEntry[], functionName: string): Promise { - // logEntries.push(logEntry); - // const { logLevel, blockHeight, message } = logEntry; - // return await this.writeLogOld(logLevel, functionName, blockHeight, message); - // } + async writeLog (logEntry: LogEntry, logEntries: LogEntry[], functionName: string): Promise { + logEntries.push(logEntry); + const { level, blockHeight, message } = logEntry; + if (blockHeight) { + return await this.writeLogOld(level, functionName, blockHeight, message); + } + } - // async callWriteLog (logEntry: LogEntry): Promise { - // await (this.deps.indexerMeta as IndexerMeta).writeLogs([logEntry]); - // } + // onetime use method to allow stream-handler to writeLog into new log table in case of failure + async callWriteLog (logEntry: LogEntry): Promise { + await (this.deps.indexerMeta as IndexerMeta).writeLogs([logEntry]); + } async updateIndexerBlockHeight (functionName: string, blockHeight: number, isHistorical: boolean): Promise { const realTimeMutation: string = ` @@ -542,8 +530,7 @@ export default class Indexer { } } - // todo rename to writeLogOld - async writeLog (logLevel: LogLevel, functionName: string, blockHeight: number, ...message: any[]): Promise { + async writeLogOld (logLevel: LogLevel, functionName: string, blockHeight: number, ...message: any[]): Promise { if (logLevel < this.indexer_behavior.log_level) { return; } diff --git a/runner/src/stream-handler/stream-handler.ts b/runner/src/stream-handler/stream-handler.ts index 9d68cc146..ade513789 100644 --- a/runner/src/stream-handler/stream-handler.ts +++ b/runner/src/stream-handler/stream-handler.ts @@ -4,7 +4,7 @@ import { Worker, isMainThread } from 'worker_threads'; import { registerWorkerMetrics, deregisterWorkerMetrics } from '../metrics'; import Indexer from '../indexer'; import { IndexerStatus } from '../indexer-meta/indexer-meta'; -import { /* LogType, */ LogLevel } from '../indexer-meta/log-entry'; +import LogEntry, { LogLevel } from '../indexer-meta/log-entry'; export interface IndexerConfig { account_id: string @@ -79,15 +79,11 @@ export default class StreamHandler { console.error(`Failed to set status STOPPED for 
stream: ${this.streamKey}`, e); }); + const streamErrorLogEntry = LogEntry.systemError(`Encountered error processing stream: ${this.streamKey}, terminating thread\n${error.toString()}`, this.executorContext.block_height); + Promise.all([ - indexer.writeLog(LogLevel.ERROR, functionName, this.executorContext.block_height, `Encountered error processing stream: ${this.streamKey}, terminating thread\n${error.toString()}`), - // indexer.callWriteLog({ - // blockHeight: this.executorContext.block_height, - // logTimestamp: new Date(), - // logType: LogType.SYSTEM, - // logLevel: LogLevel.ERROR, - // message: `Encountered error processing stream: ${this.streamKey}, terminating thread\n${error.toString()}` - // }) + indexer.writeLogOld(LogLevel.ERROR, functionName, this.executorContext.block_height, `Encountered error processing stream: ${this.streamKey}, terminating thread\n${error.toString()}`), + indexer.callWriteLog(streamErrorLogEntry), ]).catch((e) => { console.error(`Failed to write log for stream: ${this.streamKey}`, e); }); diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts index 3dde48358..fbbe6ce50 100644 --- a/runner/tests/integration.test.ts +++ b/runner/tests/integration.test.ts @@ -6,7 +6,6 @@ import Indexer from '../src/indexer'; import HasuraClient from '../src/hasura-client'; import Provisioner from '../src/provisioner'; import PgClient from '../src/pg-client'; -// import IndexerMeta from '../src/indexer-meta/indexer-meta'; import { HasuraGraphQLContainer, type StartedHasuraGraphQLContainer } from './testcontainers/hasura'; import { PostgreSqlContainer, type StartedPostgreSqlContainer } from './testcontainers/postgres'; @@ -72,22 +71,12 @@ describe('Indexer integration', () => { hasuraPortOverride: Number(postgresContainer.getPort()), } ); - // const userDB = await provisioner.getDatabaseConnectionParameters('morgs_near'); - - // const indexerMeta = new IndexerMeta('morgs_near', LogLevel.INFO, { - // host: postgresContainer.getIpAddress(), - // port: Number(postgresContainer.getPort()), - // database: userDB.database, - // username: userDB.username, - // password: userDB.password - // }, pgClient); const indexer = new Indexer( { log_level: LogLevel.INFO, }, { - // indexerMeta, provisioner }, undefined, From e8de5b86647039284823715da4a014a462d275c6 Mon Sep 17 00:00:00 2001 From: kevin Date: Wed, 10 Apr 2024 17:02:06 -0400 Subject: [PATCH 28/39] chore: commented out unused code --- runner/src/indexer/indexer.ts | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 307d569a3..9244cb74b 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -5,7 +5,7 @@ import { Parser } from 'node-sql-parser'; import Provisioner from '../provisioner'; import DmlHandler from '../dml-handler/dml-handler'; -import LogEntry, { LogLevel } from '../indexer-meta/log-entry'; +import /**LogEntry,*/ { LogLevel } from '../indexer-meta/log-entry'; import { type IndexerBehavior } from '../stream-handler/stream-handler'; import /** IndexerMeta, */ { IndexerStatus } from '../indexer-meta/indexer-meta'; @@ -96,7 +96,7 @@ export default class Indexer { const simultaneousPromises: Array> = []; const allMutations: string[] = []; - const logEntries: LogEntry[] = []; + // const logEntries: LogEntry[] = []; for (const functionName in functions) { try { @@ -130,8 +130,8 @@ export default class Indexer { } } - const runningLogEntry = LogEntry.systemInfo(runningMessage, 
blockHeight);
-      logEntries.push(runningLogEntry);
+      // const runningLogEntry = LogEntry.systemInfo(runningMessage, blockHeight);
+      // logEntries.push(runningLogEntry);
       // Cache database credentials after provisioning
       const credentialsFetchSpan = this.tracer.startSpan('fetch database connection parameters');
       try {
@@ -168,8 +168,8 @@ export default class Indexer {
       } catch (e) {
         const error = e as Error;
         simultaneousPromises.push(this.writeLog(LogLevel.ERROR, functionName, blockHeight, 'Error running IndexerFunction', error.message));
-        const indexerErrorLogEntry = LogEntry.systemError('Error running IndexerFunction', blockHeight);
-        logEntries.push(indexerErrorLogEntry);
+        // const indexerErrorLogEntry = LogEntry.systemError('Error running IndexerFunction', blockHeight);
+        // logEntries.push(indexerErrorLogEntry);
         throw e;
       } finally {
         runIndexerCodeSpan.end();

From 5efbc81e0652d30446e182981b9dac1a555be611 Mon Sep 17 00:00:00 2001
From: kevin
Date: Wed, 10 Apr 2024 18:10:21 -0400
Subject: [PATCH 29/39] add nullish coalescing to getDatabaseConnectionParameters

---
 runner/src/indexer/indexer.test.ts | 2 +-
 runner/src/indexer/indexer.ts      | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts
index 374d5799f..370c9d7ab 100644
--- a/runner/src/indexer/indexer.test.ts
+++ b/runner/src/indexer/indexer.test.ts
@@ -1017,7 +1017,7 @@ CREATE TABLE
     await indexer.runFunctions(mockBlock, functions, false, { provision: true });

     expect(provisioner.provisionUserApi).not.toHaveBeenCalled();
-    expect(provisioner.getDatabaseConnectionParameters).toHaveBeenCalledTimes(3); // todo: temporary fix for the test to ensure new logging works
+    expect(provisioner.getDatabaseConnectionParameters).toHaveBeenCalledTimes(1);
   });

   test('Indexer.runFunctions() supplies the required role to the GraphQL endpoint', async () => {
diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts
index 54ca0e9cc..e87d3d359 100644
--- a/runner/src/indexer/indexer.ts
+++ b/runner/src/indexer/indexer.ts
@@ -135,7 +135,7 @@ export default class Indexer {
     const credentialsFetchSpan = this.tracer.startSpan('fetch database connection parameters');
     try {
       // this.database_connection_parameters ??= await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters;
-      this.database_connection_parameters = await this.getDatabaseConnectionParams(hasuraRoleName);
+      this.database_connection_parameters ??= await this.getDatabaseConnectionParams(hasuraRoleName);
       this.deps.indexerMeta ??= new IndexerMeta(functionName, this.indexer_behavior.log_level, this.database_connection_parameters);
       this.deps.dmlHandler ??= new DmlHandler(this.database_connection_parameters);
     } catch (e) {

From d68a242fe49a332d613e7d03be02a5e8a7864f79 Mon Sep 17 00:00:00 2001
From: kevin
Date: Sun, 14 Apr 2024 15:02:11 -0400
Subject: [PATCH 30/39] feat: added unit test to check logs, removed unused line

---
 runner/src/indexer/indexer.test.ts | 49 ++++++++++++++++++++++++++++++
 runner/src/indexer/indexer.ts      |  1 -
 2 files changed, 49 insertions(+), 1 deletion(-)

diff --git a/runner/src/indexer/indexer.test.ts b/runner/src/indexer/indexer.test.ts
index 370c9d7ab..1c0e741d5 100644
--- a/runner/src/indexer/indexer.test.ts
+++ b/runner/src/indexer/indexer.test.ts
@@ -1248,4 +1248,53 @@ CREATE TABLE
     }
     ]);
   });
+
+  it('calls writeLogs at the end of execution and all logs are present', async () => {
+    const mockFetchDebug = jest.fn(() =>
({ + status: 200, + json: async () => ({ + errors: null, + }), + })); + + const blockHeight = 456; + const mockBlock = Block.fromStreamerMessage({ + block: { + chunks: [], + header: { + height: blockHeight + } + }, + shards: {} + } as unknown as StreamerMessage) as unknown as Block; + + const indexerMeta: any = { + writeLogs: jest.fn(), + }; + + const functions: Record = {}; + functions['buildnear.testnet/test'] = { + code: ` + console.debug('debug log'); + console.log('info log'); + console.error('error log'); + await context.db.Posts.select({ + account_id: 'morgs_near', + receipt_id: 'abc', + }); + `, + schema: SIMPLE_SCHEMA + }; + + const indexerDebug = new Indexer( + { log_level: LogLevel.DEBUG }, + { fetch: mockFetchDebug as unknown as typeof fetch, provisioner: genericProvisioner, dmlHandler: genericMockDmlHandler, indexerMeta }, + undefined, + config + ); + + await indexerDebug.runFunctions(mockBlock, functions, false); + expect(indexerMeta.writeLogs).toHaveBeenCalledTimes(1); + expect(indexerMeta.writeLogs.mock.calls[0][0]).toHaveLength(5); + }); }); diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index e87d3d359..7661d33f1 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -134,7 +134,6 @@ export default class Indexer { // Cache database credentials after provisioning const credentialsFetchSpan = this.tracer.startSpan('fetch database connection parameters'); try { - // this.database_connection_parameters ??= await this.deps.provisioner.getDatabaseConnectionParameters(hasuraRoleName) as DatabaseConnectionParameters; this.database_connection_parameters ??= await this.getDatabaseConnectionParams(hasuraRoleName); this.deps.indexerMeta ??= new IndexerMeta(functionName, this.indexer_behavior.log_level, this.database_connection_parameters); this.deps.dmlHandler ??= new DmlHandler(this.database_connection_parameters); From d6285fe577550287abd0096bcd08376b798767a3 Mon Sep 17 00:00:00 2001 From: kevin Date: Sun, 14 Apr 2024 15:18:59 -0400 Subject: [PATCH 31/39] feat: added integration test for logs asserting log_entries are equal to new _logs --- runner/tests/integration.test.ts | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts index 3dde48358..64c36850d 100644 --- a/runner/tests/integration.test.ts +++ b/runner/tests/integration.test.ts @@ -6,7 +6,6 @@ import Indexer from '../src/indexer'; import HasuraClient from '../src/hasura-client'; import Provisioner from '../src/provisioner'; import PgClient from '../src/pg-client'; -// import IndexerMeta from '../src/indexer-meta/indexer-meta'; import { HasuraGraphQLContainer, type StartedHasuraGraphQLContainer } from './testcontainers/hasura'; import { PostgreSqlContainer, type StartedPostgreSqlContainer } from './testcontainers/postgres'; @@ -72,22 +71,12 @@ describe('Indexer integration', () => { hasuraPortOverride: Number(postgresContainer.getPort()), } ); - // const userDB = await provisioner.getDatabaseConnectionParameters('morgs_near'); - // const indexerMeta = new IndexerMeta('morgs_near', LogLevel.INFO, { - // host: postgresContainer.getIpAddress(), - // port: Number(postgresContainer.getPort()), - // database: userDB.database, - // username: userDB.username, - // password: userDB.password - // }, pgClient); - const indexer = new Indexer( { log_level: LogLevel.INFO, }, { - // indexerMeta, provisioner }, undefined, @@ -190,5 +179,15 @@ describe('Indexer integration', () => { 
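
Note on the `??=` changes in patches 29 and 30 above: nullish coalescing assignment only evaluates (and awaits) its right-hand side when the target is still null or undefined, so the provisioner round trip and the IndexerMeta/DmlHandler construction each happen at most once per executor instance. A minimal sketch of the pattern, assuming simplified types; `ConnectionParams` and `fetchParams` are hypothetical stand-ins, not the runner's real API:

interface ConnectionParams { host: string, port: number }

class LazyDeps {
  private params: ConnectionParams | undefined;

  constructor (private readonly fetchParams: () => Promise<ConnectionParams>) {}

  async getParams (): Promise<ConnectionParams> {
    // The right-hand side only runs when `params` is still undefined,
    // so repeated calls reuse the cached value instead of re-fetching.
    this.params ??= await this.fetchParams();
    return this.params;
  }
}
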
`); expect(logs.length).toEqual(4); + + const { morgs_near_test___logs: _logs }: any = await graphqlClient.request(gql` + query { + morgs_near_test___logs { + message + } + } + `); + + expect(_logs.length).toEqual(4); }); }); From d5f87de9407701d7149b7203ec4782aa8dc7eb39 Mon Sep 17 00:00:00 2001 From: kevin Date: Mon, 15 Apr 2024 17:05:15 -0400 Subject: [PATCH 32/39] fix: corrected param for indexerMeta instantiation --- runner/src/indexer-meta/indexer-meta.ts | 1 - runner/src/indexer/indexer.ts | 3 +- runner/src/test-client.ts | 89 +++++++++++++++++++++++++ 3 files changed, 91 insertions(+), 2 deletions(-) create mode 100644 runner/src/test-client.ts diff --git a/runner/src/indexer-meta/indexer-meta.ts b/runner/src/indexer-meta/indexer-meta.ts index 9c8edc485..cd7cb4207 100644 --- a/runner/src/indexer-meta/indexer-meta.ts +++ b/runner/src/indexer-meta/indexer-meta.ts @@ -58,7 +58,6 @@ export default class IndexerMeta { LogLevel[entry.level], entry.message ]); - const query = format(this.logInsertQueryTemplate, this.schemaName, values); await this.pgClient.query(query); }, `Failed to insert ${entriesArray.length > 1 ? 'logs' : 'log'} into the ${this.schemaName}.__logs table`) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 980e020f9..08561514c 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -115,7 +115,8 @@ export default class Indexer { const credentialsFetchSpan = this.tracer.startSpan('fetch database connection parameters'); try { this.database_connection_parameters ??= await this.deps.provisioner.getPgBouncerConnectionParameters(this.indexerConfig.hasuraRoleName()); - this.deps.indexerMeta ??= new IndexerMeta(this.indexerConfig.functionName, this.indexerConfig.logLevel, this.database_connection_parameters); + + this.deps.indexerMeta ??= new IndexerMeta(this.indexerConfig.schemaName(), this.indexerConfig.logLevel, this.database_connection_parameters); this.deps.dmlHandler ??= new DmlHandler(this.database_connection_parameters); } catch (e) { const error = e as Error; diff --git a/runner/src/test-client.ts b/runner/src/test-client.ts new file mode 100644 index 000000000..d08f769e1 --- /dev/null +++ b/runner/src/test-client.ts @@ -0,0 +1,89 @@ +// Run with 'npx ts-node src/test-client.ts' // located at +import runnerClient from './server/runner-client'; +const schema = ` +CREATE TABLE +versions ( + "id" SERIAL PRIMARY KEY, + "block_height" BIGINT NOT NULL, + "block_timestamp_ms" BIGINT NOT NULL, + "code" VARCHAR NOT NULL, + "component_author_id" VARCHAR NOT NULL, + "component_name" VARCHAR NOT NULL, + "lines_added" INT NOT NULL, + "lines_removed" INT NOT NULL, + "receipt_id" VARCHAR NOT NULL +); +CREATE TABLE +metadata ( + "component_id" VARCHAR PRIMARY KEY, + "block_height" BIGINT NOT NULL, + "block_timestamp_ms" BIGINT NOT NULL, + "code" VARCHAR NOT NULL, + "component_author_id" VARCHAR NOT NULL, + "component_name" VARCHAR NOT NULL, + "star_count" INT NOT NULL, + "fork_count" INT NOT NULL, + "name" VARCHAR, + "image_ipfs_cid" VARCHAR, + "description" VARCHAR, + "fork_of_source" VARCHAR, + "fork_of_block_height" BIGINT, + "tags" VARCHAR, + "website" VARCHAR +); +`; +const code = ` +const h = block.header().height; +const blockTimestampMs = Math.floor( + Number(block.header().timestampNanosec) / 1e6 +); +const code = 'console.log("hello world")'; +const componentAuthorId = 'kevin0.near'; +const componentName = 'test_component_1'; +const linesAdded = 1; +const linesRemoved = 1; +receiptId = 
'3WGZ91JVF2kxF54SryuktCCmH2kgijuGM9P3uoqSGs5s' +await console.debug('debug log'); +await console.log('info log'); +await console.log('Hello this is some indexer log'); +await console.error('error log'); +// await context.db.Metadata.insert( +// {block_height: h, block_timestamp_ms: blockTimestampMs, code, component_author_id: componentAuthorId, component_name: componentName, star_count: 0, fork_count: 0, name: 'test', image_ipfs_cid: 'test', description: 'test', fork_of_source: 'test', fork_of_block_height: 0, tags: 'test', website: 'test'} +// ); +`; +const indexer = { + account_id: 'kevin21.near', + redis_stream: 'test:block_stream', + function_name: 'component_01', + code, + start_block_height: 113448278, + schema, + provisioned: true, + indexer_rule: { + indexer_rule_kind: 'Action', + matching_rule: { + rule: 'ACTION_ANY', + affected_account_id: 'social.near', + status: 'SUCCESS' + }, + id: null, + name: null + } +}; +void (async function main () { + // console.log(indexer.redis_stream, indexer.account_id, indexer.function_name, indexer.code, indexer.schema) + runnerClient.StartExecutor({ + redisStream: indexer.redis_stream, + accountId: indexer.account_id, + functionName: indexer.function_name, + code: indexer.code, + schema: indexer.schema + }, (err, response) => { + if (err) { + } else { + console.log('start: ', response); + console.log('running...'); + } + }); + console.log('done'); +})(); From 6ed96aa4d089df7a3f2ed5acf4f55bc111be1bed Mon Sep 17 00:00:00 2001 From: kevin Date: Mon, 15 Apr 2024 17:29:33 -0400 Subject: [PATCH 33/39] fix: use IndexerConfig for indexerMeta --- runner/src/indexer-meta/indexer-meta.test.ts | 24 +++--- runner/src/indexer-meta/indexer-meta.ts | 8 +- runner/src/indexer/indexer.ts | 2 +- runner/src/test-client.ts | 89 -------------------- 4 files changed, 18 insertions(+), 105 deletions(-) delete mode 100644 runner/src/test-client.ts diff --git a/runner/src/indexer-meta/indexer-meta.test.ts b/runner/src/indexer-meta/indexer-meta.test.ts index 673cdab0b..a0294b740 100644 --- a/runner/src/indexer-meta/indexer-meta.test.ts +++ b/runner/src/indexer-meta/indexer-meta.test.ts @@ -3,6 +3,7 @@ import IndexerMeta, { IndexerStatus } from './indexer-meta'; import type PgClient from '../pg-client'; import LogEntry, { LogLevel } from './log-entry'; import { type PostgresConnectionParams } from '../pg-client'; +import IndexerConfig from '../indexer-config/indexer-config'; describe('IndexerMeta', () => { let genericMockPgClient: PgClient; @@ -23,8 +24,9 @@ describe('IndexerMeta', () => { port: 5432, database: 'test_database' }; - const functionName = 'some_account/some_indexer'; - const schemaName = functionName.replace(/[^a-zA-Z0-9]/g, '_'); + + const indexerConfig = new IndexerConfig('', '', 'some_account/some_indexer', 0, '', '', LogLevel.INFO); + const schemaName = indexerConfig.schemaName(); describe('writeLog', () => { it('should insert a single log entry into the database', async () => { @@ -32,7 +34,7 @@ describe('IndexerMeta', () => { jest.useFakeTimers({ now: date.getTime() }); const formattedDate = date.toISOString().replace('T', ' ').replace('Z', '+00'); - const indexerMeta = new IndexerMeta(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); const infoEntry = LogEntry.systemInfo('Info message'); await indexerMeta.writeLogs([infoEntry]); @@ -45,7 +47,7 @@ describe('IndexerMeta', () => { jest.useFakeTimers({ now: 
date.getTime() }); const formattedDate = date.toISOString().replace('T', ' ').replace('Z', '+00'); - const indexerMeta = new IndexerMeta(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); const errorEntry = LogEntry.systemError('Error message', 12345); await indexerMeta.writeLogs([errorEntry]); @@ -56,13 +58,13 @@ describe('IndexerMeta', () => { it('should handle errors when inserting a single log entry', async () => { query.mockRejectedValueOnce(new Error('Failed to insert log')); - const indexerMeta = new IndexerMeta(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); const errorEntry = LogEntry.systemError('Error message', 12345); await expect(indexerMeta.writeLogs([errorEntry])).rejects.toThrow('Failed to insert log'); }); it('should insert a batch of log entries into the database', async () => { - const indexerMeta = new IndexerMeta(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); const debugEntry = LogEntry.systemDebug('Debug message'); const infoEntry = LogEntry.systemInfo('Information message'); const logEntries: LogEntry[] = [ @@ -79,7 +81,7 @@ describe('IndexerMeta', () => { it('should handle errors when inserting a batch of log entries', async () => { query.mockRejectedValueOnce(new Error('Failed to insert batch of logs')); - const indexerMeta = new IndexerMeta(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); const debugEntry = LogEntry.systemDebug('Debug message'); const infoEntry = LogEntry.systemInfo('Information message'); const logEntries: LogEntry[] = [ @@ -91,7 +93,7 @@ describe('IndexerMeta', () => { }); it('should handle empty log entry', async () => { - const indexerMeta = new IndexerMeta(functionName, LogLevel.INFO, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); const logEntries: LogEntry[] = []; await indexerMeta.writeLogs(logEntries); @@ -99,7 +101,7 @@ describe('IndexerMeta', () => { }); it('should skip log entries with levels lower than the logging level specified in the constructor', async () => { - const indexerMeta = new IndexerMeta(functionName, LogLevel.ERROR, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); const debugEntry = LogEntry.systemDebug('Debug message'); await indexerMeta.writeLogs([debugEntry]); @@ -108,7 +110,7 @@ describe('IndexerMeta', () => { }); it('writes status for indexer', async () => { - const indexerMeta = new IndexerMeta(functionName, 5, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); await indexerMeta.setStatus(IndexerStatus.RUNNING); expect(query).toBeCalledWith( `INSERT INTO ${schemaName}.__metadata (attribute, value) VALUES ('STATUS', 'RUNNING') ON CONFLICT (attribute) DO UPDATE SET value = EXCLUDED.value RETURNING *` @@ 
-116,7 +118,7 @@ describe('IndexerMeta', () => { }); it('writes last processed block height for indexer', async () => { - const indexerMeta = new IndexerMeta(functionName, 5, mockDatabaseConnectionParameters, genericMockPgClient); + const indexerMeta = new IndexerMeta(indexerConfig, mockDatabaseConnectionParameters, genericMockPgClient); await indexerMeta.updateBlockheight(123); expect(query).toBeCalledWith( `INSERT INTO ${schemaName}.__metadata (attribute, value) VALUES ('LAST_PROCESSED_BLOCK_HEIGHT', '123') ON CONFLICT (attribute) DO UPDATE SET value = EXCLUDED.value RETURNING *` diff --git a/runner/src/indexer-meta/indexer-meta.ts b/runner/src/indexer-meta/indexer-meta.ts index cd7cb4207..a845cf7af 100644 --- a/runner/src/indexer-meta/indexer-meta.ts +++ b/runner/src/indexer-meta/indexer-meta.ts @@ -4,6 +4,7 @@ import PgClient, { type PostgresConnectionParams } from '../pg-client'; import { trace } from '@opentelemetry/api'; import type LogEntry from './log-entry'; import { LogLevel } from './log-entry'; +import type IndexerConfig from '../indexer-config/indexer-config'; export enum IndexerStatus { PROVISIONING = 'PROVISIONING', @@ -25,16 +26,15 @@ export default class IndexerMeta { private readonly loggingLevel: number; constructor ( - functionName: string, - loggingLevel: number, + indexerConfig: IndexerConfig, databaseConnectionParameters: PostgresConnectionParams, pgClientInstance: PgClient | undefined = undefined ) { const pgClient = pgClientInstance ?? new PgClient(databaseConnectionParameters); this.pgClient = pgClient; - this.schemaName = functionName.replace(/[^a-zA-Z0-9]/g, '_'); - this.loggingLevel = loggingLevel; + this.schemaName = indexerConfig.schemaName(); + this.loggingLevel = indexerConfig.logLevel; } private shouldLog (logLevel: LogLevel): boolean { diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index 08561514c..9594aa63d 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -116,7 +116,7 @@ export default class Indexer { try { this.database_connection_parameters ??= await this.deps.provisioner.getPgBouncerConnectionParameters(this.indexerConfig.hasuraRoleName()); - this.deps.indexerMeta ??= new IndexerMeta(this.indexerConfig.schemaName(), this.indexerConfig.logLevel, this.database_connection_parameters); + this.deps.indexerMeta ??= new IndexerMeta(this.indexerConfig, this.database_connection_parameters); this.deps.dmlHandler ??= new DmlHandler(this.database_connection_parameters); } catch (e) { const error = e as Error; diff --git a/runner/src/test-client.ts b/runner/src/test-client.ts deleted file mode 100644 index d08f769e1..000000000 --- a/runner/src/test-client.ts +++ /dev/null @@ -1,89 +0,0 @@ -// Run with 'npx ts-node src/test-client.ts' // located at -import runnerClient from './server/runner-client'; -const schema = ` -CREATE TABLE -versions ( - "id" SERIAL PRIMARY KEY, - "block_height" BIGINT NOT NULL, - "block_timestamp_ms" BIGINT NOT NULL, - "code" VARCHAR NOT NULL, - "component_author_id" VARCHAR NOT NULL, - "component_name" VARCHAR NOT NULL, - "lines_added" INT NOT NULL, - "lines_removed" INT NOT NULL, - "receipt_id" VARCHAR NOT NULL -); -CREATE TABLE -metadata ( - "component_id" VARCHAR PRIMARY KEY, - "block_height" BIGINT NOT NULL, - "block_timestamp_ms" BIGINT NOT NULL, - "code" VARCHAR NOT NULL, - "component_author_id" VARCHAR NOT NULL, - "component_name" VARCHAR NOT NULL, - "star_count" INT NOT NULL, - "fork_count" INT NOT NULL, - "name" VARCHAR, - "image_ipfs_cid" VARCHAR, - "description" 
VARCHAR, - "fork_of_source" VARCHAR, - "fork_of_block_height" BIGINT, - "tags" VARCHAR, - "website" VARCHAR -); -`; -const code = ` -const h = block.header().height; -const blockTimestampMs = Math.floor( - Number(block.header().timestampNanosec) / 1e6 -); -const code = 'console.log("hello world")'; -const componentAuthorId = 'kevin0.near'; -const componentName = 'test_component_1'; -const linesAdded = 1; -const linesRemoved = 1; -receiptId = '3WGZ91JVF2kxF54SryuktCCmH2kgijuGM9P3uoqSGs5s' -await console.debug('debug log'); -await console.log('info log'); -await console.log('Hello this is some indexer log'); -await console.error('error log'); -// await context.db.Metadata.insert( -// {block_height: h, block_timestamp_ms: blockTimestampMs, code, component_author_id: componentAuthorId, component_name: componentName, star_count: 0, fork_count: 0, name: 'test', image_ipfs_cid: 'test', description: 'test', fork_of_source: 'test', fork_of_block_height: 0, tags: 'test', website: 'test'} -// ); -`; -const indexer = { - account_id: 'kevin21.near', - redis_stream: 'test:block_stream', - function_name: 'component_01', - code, - start_block_height: 113448278, - schema, - provisioned: true, - indexer_rule: { - indexer_rule_kind: 'Action', - matching_rule: { - rule: 'ACTION_ANY', - affected_account_id: 'social.near', - status: 'SUCCESS' - }, - id: null, - name: null - } -}; -void (async function main () { - // console.log(indexer.redis_stream, indexer.account_id, indexer.function_name, indexer.code, indexer.schema) - runnerClient.StartExecutor({ - redisStream: indexer.redis_stream, - accountId: indexer.account_id, - functionName: indexer.function_name, - code: indexer.code, - schema: indexer.schema - }, (err, response) => { - if (err) { - } else { - console.log('start: ', response); - console.log('running...'); - } - }); - console.log('done'); -})(); From 11dfe358f6beccacfff5a51a8daf5733b0b065d1 Mon Sep 17 00:00:00 2001 From: kevin Date: Mon, 15 Apr 2024 17:49:55 -0400 Subject: [PATCH 34/39] searching for provisioning endpoints --- runner/tests/integration.test.ts | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts index fe1371141..8c5d59cec 100644 --- a/runner/tests/integration.test.ts +++ b/runner/tests/integration.test.ts @@ -150,14 +150,25 @@ describe('Indexer integration', () => { expect(logs.length).toEqual(4); const { morgs_near_test___logs: _logs }: any = await graphqlClient.request(gql` - query { - morgs_near_test___logs { - message + query { + morgs_near_test___logs { + message + } } - } - `); + `); + + expect(_logs.length).toEqual(4); + + const { morgs_near_test___logs: provisioning_endpoints }: any = await graphqlClient.request(gql` + query MyQuery($searchString: [String!]) { + morgs_near_test___logs(where: {message: {_ilike: "%Provisioning endpoint%"}}) { + message + } + } + `); + + expect(provisioning_endpoints.length).toEqual(2); -expect(_logs.length).toEqual(4); }); it('test context db', async () => { From b960b2c8e37e6c198cdf3b02ee613f3db09cf8b4 Mon Sep 17 00:00:00 2001 From: kevin Date: Mon, 15 Apr 2024 17:55:42 -0400 Subject: [PATCH 35/39] fix: check run functions --- runner/tests/integration.test.ts | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts index 8c5d59cec..9bcfff805 100644 --- a/runner/tests/integration.test.ts +++ b/runner/tests/integration.test.ts @@ -160,7 +160,7 @@ 
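
The assertions added in patches 34 and 35 lean on Hasura's `_ilike` operator, which does case-insensitive pattern matching with `%` wildcards over the generated `__logs` query field. A hedged sketch of the same lookup with the search term passed as a GraphQL variable instead of inlined; the table name is taken from the tests, while `searchLogs` and `term` are illustrative:

import { gql, GraphQLClient } from 'graphql-request';

// Returns every log row whose message contains `term`, ignoring case.
async function searchLogs (client: GraphQLClient, term: string): Promise<Array<{ message: string }>> {
  const query = gql`
    query ($search: String!) {
      morgs_near_test___logs(where: { message: { _ilike: $search } }) {
        message
      }
    }
  `;
  const { morgs_near_test___logs: logs }: any = await client.request(query, { search: `%${term}%` });
  return logs;
}
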
describe('Indexer integration', () => {

     expect(_logs.length).toEqual(4);

     const { morgs_near_test___logs: provisioning_endpoints }: any = await graphqlClient.request(gql`
-      query MyQuery($searchString: [String!]) {
+      query {
         morgs_near_test___logs(where: {message: {_ilike: "%Provisioning endpoint%"}}) {
           message
         }
@@ -169,6 +169,16 @@ describe('Indexer integration', () => {

     expect(provisioning_endpoints.length).toEqual(2);

+    const { morgs_near_test___logs: running_function_endpoint }: any = await graphqlClient.request(gql`
+      query {
+        morgs_near_test___logs(where: {message: {_ilike: "%Running function%"}}) {
+          message
+        }
+      }
+    `);
+
+    expect(running_function_endpoint.length).toEqual(2);
+
   });

   it('test context db', async () => {

From a3a040e0a16ee006a922a1eb2a4874281e3e8925 Mon Sep 17 00:00:00 2001
From: kevin
Date: Tue, 16 Apr 2024 11:40:38 -0400
Subject: [PATCH 36/39] chore: renamed log table

---
 runner/src/indexer/indexer.ts    | 1 -
 runner/tests/integration.test.ts | 8 ++++----
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts
index 9594aa63d..c92069701 100644
--- a/runner/src/indexer/indexer.ts
+++ b/runner/src/indexer/indexer.ts
@@ -115,7 +115,6 @@ export default class Indexer {
     const credentialsFetchSpan = this.tracer.startSpan('fetch database connection parameters');
     try {
       this.database_connection_parameters ??= await this.deps.provisioner.getPgBouncerConnectionParameters(this.indexerConfig.hasuraRoleName());
-
       this.deps.indexerMeta ??= new IndexerMeta(this.indexerConfig, this.database_connection_parameters);
       this.deps.dmlHandler ??= new DmlHandler(this.database_connection_parameters);
     } catch (e) {
diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts
index 9bcfff805..256d0982f 100644
--- a/runner/tests/integration.test.ts
+++ b/runner/tests/integration.test.ts
@@ -139,7 +139,7 @@ describe('Indexer integration', () => {
     expect(state.current_block_height).toEqual(115185109);
     expect(state.status).toEqual('RUNNING');

-    const { indexer_log_entries: logs }: any = await graphqlClient.request(gql`
+    const { indexer_log_entries: old_logs }: any = await graphqlClient.request(gql`
       query {
         indexer_log_entries(where: { function_name: { _eq:"morgs.near/test" } }) {
           message
@@ -147,9 +147,9 @@
       }
     `);

-    expect(logs.length).toEqual(4);
+    expect(old_logs.length).toEqual(4);

-    const { morgs_near_test___logs: _logs }: any = await graphqlClient.request(gql`
+    const { morgs_near_test___logs: logs }: any = await graphqlClient.request(gql`
       query {
         morgs_near_test___logs {
           message
@@ -157,7 +157,7 @@
       }
     `);

-    expect(_logs.length).toEqual(4);
+    expect(logs.length).toEqual(4);

     const { morgs_near_test___logs: provisioning_endpoints }: any = await graphqlClient.request(gql`
       query {

From 845cfacf691b05b1dfdc849633ae53f2406fcd04 Mon Sep 17 00:00:00 2001
From: kevin
Date: Tue, 16 Apr 2024 11:41:21 -0400
Subject: [PATCH 37/39] chore: spacing

---
 runner/tests/integration.test.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/runner/tests/integration.test.ts b/runner/tests/integration.test.ts
index 256d0982f..df9c6944c 100644
--- a/runner/tests/integration.test.ts
+++ b/runner/tests/integration.test.ts
@@ -305,4 +305,4 @@ describe('Indexer integration', () => {
     `);
     expect(totalRows.length).toEqual(3); // Two inserts, and the overwritten upsert
   });
-});
\ No newline at end of file
+});

From
1ead54b9ce03a9116618a36948e8271ae76ad3d9 Mon Sep 17 00:00:00 2001 From: kevin Date: Tue, 16 Apr 2024 13:54:16 -0400 Subject: [PATCH 38/39] set systemLogs in buildDB context to user --- runner/src/indexer/indexer.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/runner/src/indexer/indexer.ts b/runner/src/indexer/indexer.ts index c92069701..d3a554e43 100644 --- a/runner/src/indexer/indexer.ts +++ b/runner/src/indexer/indexer.ts @@ -315,7 +315,7 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db insert', async (insertSpan: Span) => { try { // Write log before calling insert - const insertLogEntry = LogEntry.systemDebug(`Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}`, blockHeight); + const insertLogEntry = LogEntry.userDebug(`Inserting object ${JSON.stringify(objectsToInsert)} into table ${tableName}`, blockHeight); await this.writeLog(insertLogEntry, logEntries); // Call insert with parameters return await dmlHandler.insert(this.indexerConfig.schemaName(), tableDefinitionNames, Array.isArray(objectsToInsert) ? objectsToInsert : [objectsToInsert]); @@ -328,7 +328,7 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db select', async (selectSpan: Span) => { try { // Write log before calling select - const selectLogEntry = LogEntry.systemDebug(`Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit`, blockHeight); + const selectLogEntry = LogEntry.userDebug(`Selecting objects in table ${tableName} with values ${JSON.stringify(filterObj)} with ${limit === null ? 'no' : limit} limit`, blockHeight); await this.writeLog(selectLogEntry, logEntries); // Call select with parameters return await dmlHandler.select(this.indexerConfig.schemaName(), tableDefinitionNames, filterObj, limit); @@ -341,7 +341,7 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db update', async (updateSpan: Span) => { try { // Write log before calling update - const updateLogEntry = LogEntry.systemDebug(`Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}`, blockHeight); + const updateLogEntry = LogEntry.userDebug(`Updating objects in table ${tableName} that match ${JSON.stringify(filterObj)} with values ${JSON.stringify(updateObj)}`, blockHeight); await this.writeLog(updateLogEntry, logEntries); // Call update with parameters return await dmlHandler.update(this.indexerConfig.schemaName(), tableDefinitionNames, filterObj, updateObj); @@ -354,7 +354,7 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db upsert', async (upsertSpan: Span) => { try { // Write log before calling upsert - const upsertLogEntry = LogEntry.systemDebug(`Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}`, blockHeight); + const upsertLogEntry = LogEntry.userDebug(`Inserting objects into table ${tableName} with values ${JSON.stringify(objectsToInsert)}. 
Conflict on columns ${conflictColumns.join(', ')} will update values in columns ${updateColumns.join(', ')}`, blockHeight); await this.writeLog(upsertLogEntry, logEntries); // Call upsert with parameters return await dmlHandler.upsert(this.indexerConfig.schemaName(), tableDefinitionNames, Array.isArray(objectsToInsert) ? objectsToInsert : [objectsToInsert], conflictColumns, updateColumns); @@ -367,7 +367,7 @@ export default class Indexer { return await this.tracer.startActiveSpan('Call context db delete', async (deleteSpan: Span) => { try { // Write log before calling delete - const deleteLogEntry = LogEntry.systemDebug(`Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}`, blockHeight); + const deleteLogEntry = LogEntry.userDebug(`Deleting objects from table ${tableName} with values ${JSON.stringify(filterObj)}`, blockHeight); await this.writeLog(deleteLogEntry, logEntries); // Call delete with parameters return await dmlHandler.delete(this.indexerConfig.schemaName(), tableDefinitionNames, filterObj); From 4e718d6ac51487e5ce31473c1c5e4dabddfabe78 Mon Sep 17 00:00:00 2001 From: kevin Date: Tue, 16 Apr 2024 14:16:15 -0400 Subject: [PATCH 39/39] fix: use of IndexerConfig in indexerMeta --- runner/src/indexer-meta/indexer-meta.test.ts | 2 +- runner/src/indexer-meta/indexer-meta.ts | 22 ++++++++++---------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/runner/src/indexer-meta/indexer-meta.test.ts b/runner/src/indexer-meta/indexer-meta.test.ts index a0294b740..ee8da06b4 100644 --- a/runner/src/indexer-meta/indexer-meta.test.ts +++ b/runner/src/indexer-meta/indexer-meta.test.ts @@ -25,7 +25,7 @@ describe('IndexerMeta', () => { database: 'test_database' }; - const indexerConfig = new IndexerConfig('', '', 'some_account/some_indexer', 0, '', '', LogLevel.INFO); + const indexerConfig = new IndexerConfig('', 'some-account', 'some-indexer', 0, '', '', LogLevel.INFO); const schemaName = indexerConfig.schemaName(); describe('writeLog', () => { diff --git a/runner/src/indexer-meta/indexer-meta.ts b/runner/src/indexer-meta/indexer-meta.ts index a845cf7af..3ce3f17e1 100644 --- a/runner/src/indexer-meta/indexer-meta.ts +++ b/runner/src/indexer-meta/indexer-meta.ts @@ -21,9 +21,8 @@ export default class IndexerMeta { tracer = trace.getTracer('queryapi-runner-indexer-logger'); private readonly pgClient: PgClient; - private readonly schemaName: string; + private readonly indexerConfig: IndexerConfig; private readonly logInsertQueryTemplate: string = 'INSERT INTO %I.__logs (block_height, date, timestamp, type, level, message) VALUES %L'; - private readonly loggingLevel: number; constructor ( indexerConfig: IndexerConfig, @@ -33,12 +32,11 @@ export default class IndexerMeta { const pgClient = pgClientInstance ?? new PgClient(databaseConnectionParameters); this.pgClient = pgClient; - this.schemaName = indexerConfig.schemaName(); - this.loggingLevel = indexerConfig.logLevel; + this.indexerConfig = indexerConfig; } private shouldLog (logLevel: LogLevel): boolean { - return logLevel >= this.loggingLevel; + return logLevel >= this.indexerConfig.logLevel; } async writeLogs ( @@ -49,6 +47,7 @@ export default class IndexerMeta { const spanMessage = `write log for ${entriesArray.length === 1 ? 
'single entry' : `batch of ${entriesArray.length}`} through postgres `; const writeLogSpan = this.tracer.startSpan(spanMessage); + await wrapError(async () => { const values = entriesArray.map(entry => [ entry.blockHeight, @@ -58,9 +57,10 @@ export default class IndexerMeta { LogLevel[entry.level], entry.message ]); - const query = format(this.logInsertQueryTemplate, this.schemaName, values); + + const query = format(this.logInsertQueryTemplate, this.indexerConfig.schemaName(), values); await this.pgClient.query(query); - }, `Failed to insert ${entriesArray.length > 1 ? 'logs' : 'log'} into the ${this.schemaName}.__logs table`) + }, `Failed to insert ${entriesArray.length > 1 ? 'logs' : 'log'} into the ${this.indexerConfig.schemaName()}.__logs table`) .finally(() => { writeLogSpan.end(); }); @@ -69,10 +69,10 @@ export default class IndexerMeta { async setStatus (status: IndexerStatus): Promise { const setStatusSpan = this.tracer.startSpan(`set status of indexer to ${status} through postgres`); const values = [[STATUS_ATTRIBUTE, status]]; - const query = format(METADATA_TABLE_UPSERT, this.schemaName, values); + const query = format(METADATA_TABLE_UPSERT, this.indexerConfig.schemaName(), values); try { - await wrapError(async () => await this.pgClient.query(query), `Failed to update status for ${this.schemaName}`); + await wrapError(async () => await this.pgClient.query(query), `Failed to update status for ${this.indexerConfig.schemaName()}`); } finally { setStatusSpan.end(); } @@ -81,10 +81,10 @@ export default class IndexerMeta { async updateBlockheight (blockHeight: number): Promise { const setLastProcessedBlockSpan = this.tracer.startSpan(`set last processed block to ${blockHeight} through postgres`); const values = [[LAST_PROCESSED_BLOCK_HEIGHT_ATTRIBUTE, blockHeight.toString()]]; - const query = format(METADATA_TABLE_UPSERT, this.schemaName, values); + const query = format(METADATA_TABLE_UPSERT, this.indexerConfig.schemaName(), values); try { - await wrapError(async () => await this.pgClient.query(query), `Failed to update last processed block height for ${this.schemaName}`); + await wrapError(async () => await this.pgClient.query(query), `Failed to update last processed block height for ${this.indexerConfig.schemaName()}`); } finally { setLastProcessedBlockSpan.end(); }
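
For reference, the `__metadata` writes in patch 39 all reduce to the same single-row upsert keyed on `attribute`, matching the query shape asserted in the unit tests above. A standalone sketch using pg-format, assuming a plain pg Pool in place of the runner's PgClient wrapper; `upsertMetadata` is illustrative:

import format from 'pg-format';
import { Pool } from 'pg';

const METADATA_TABLE_UPSERT = 'INSERT INTO %I.__metadata (attribute, value) VALUES %L ON CONFLICT (attribute) DO UPDATE SET value = EXCLUDED.value RETURNING *';

// Writes or overwrites one metadata attribute for the given indexer schema.
// %I quotes the schema identifier; %L escapes the (attribute, value) tuples.
async function upsertMetadata (pool: Pool, schemaName: string, attribute: string, value: string): Promise<void> {
  const query = format(METADATA_TABLE_UPSERT, schemaName, [[attribute, value]]);
  await pool.query(query);
}

// e.g. upsertMetadata(pool, 'morgs_near_test', 'STATUS', 'RUNNING') produces:
// INSERT INTO morgs_near_test.__metadata (attribute, value) VALUES ('STATUS', 'RUNNING')
// ON CONFLICT (attribute) DO UPDATE SET value = EXCLUDED.value RETURNING *
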