diff --git a/yarn-project/archiver/src/archiver/archiver.test.ts b/yarn-project/archiver/src/archiver/archiver.test.ts
index c76ecbb04699..68b469f49b0a 100644
--- a/yarn-project/archiver/src/archiver/archiver.test.ts
+++ b/yarn-project/archiver/src/archiver/archiver.test.ts
@@ -174,7 +174,7 @@ describe('Archiver', () => {
 
     // Check instrumentation of proven blocks
     expect(instrumentation.processProofsVerified).toHaveBeenCalledWith([
-      { delay: 101000n, l1BlockNumber: 102n, l2BlockNumber: 1n, proverId: proverId.toString() },
+      { delay: 1000n, l1BlockNumber: 102n, l2BlockNumber: 1n, proverId: proverId.toString() },
     ]);
   }, 10_000);
 
diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts
index 711440c565e1..d98ff4cb0ac4 100644
--- a/yarn-project/archiver/src/archiver/archiver.ts
+++ b/yarn-project/archiver/src/archiver/archiver.ts
@@ -46,15 +46,16 @@ import { type Chain, type HttpTransport, type PublicClient, createPublicClient,
 
 import { type ArchiverDataStore } from './archiver_store.js';
 import { type ArchiverConfig } from './config.js';
+import { getL1BlockTime } from './eth_log_handlers.js';
+import { ArchiverInstrumentation } from './instrumentation.js';
 import {
-  type DataRetrieval,
   type SingletonDataRetrieval,
   retrieveBlockBodiesFromAvailabilityOracle,
   retrieveBlockMetadataFromRollup,
   retrieveL1ToL2Messages,
   retrieveL2ProofVerifiedEvents,
-} from './data_retrieval.js';
-import { ArchiverInstrumentation } from './instrumentation.js';
+} from './structs/data_retrieval.js';
+import { type L1Published } from './structs/published.js';
 
 /**
  * Helper interface to combine all sources this archiver implementation provides.
@@ -246,9 +247,9 @@ export class Archiver implements ArchiveSource {
     );
     await this.store.addBlockBodies(retrievedBlockBodies);
 
-    // Now that we have block bodies we will retrieve block metadata and build L2 blocks from the bodies and
-    // the metadata
-    let retrievedBlocks: DataRetrieval<L2Block>;
+    // Now that we have block bodies we will retrieve block metadata and build L2 blocks from the bodies and the metadata
+    let retrievedBlocks: L1Published<L2Block>[];
+    let lastProcessedL1BlockNumber: bigint;
     {
       // @todo @LHerskind Investigate how necessary that nextExpectedL2BlockNum really is.
       // Also, I would expect it to break horribly if we have a reorg.
@@ -262,9 +263,7 @@
         nextExpectedL2BlockNum,
       );
 
-      const retrievedBodyHashes = retrievedBlockMetadata.retrievedData.map(
-        ([header]) => header.contentCommitment.txsEffectsHash,
-      );
+      const retrievedBodyHashes = retrievedBlockMetadata.map(([header]) => header.contentCommitment.txsEffectsHash);
 
       // @note @LHerskind We will occasionally be hitting this point BEFORE, we have actually retrieved the bodies.
      // The main reason this have not been an issue earlier is because:
      // i) The blocks are published (sequenced) in the same tx as the bodies, so (mostly) the bodies are there.
      // ii) We have been lucky that latency have been small enough to not matter.
      const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);
 
-      if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
+      if (retrievedBlockMetadata.length !== blockBodiesFromStore.length) {
         this.log.warn('Block headers length does not equal block bodies length');
       }
 
-      const blocks: L2Block[] = [];
-      for (let i = 0; i < retrievedBlockMetadata.retrievedData.length; i++) {
-        const [header, archive] = retrievedBlockMetadata.retrievedData[i];
+      const blocks: L1Published<L2Block>[] = [];
+      for (let i = 0; i < retrievedBlockMetadata.length; i++) {
+        const [header, archive, l1] = retrievedBlockMetadata[i];
         const blockBody = blockBodiesFromStore[i];
 
         if (blockBody) {
-          blocks.push(new L2Block(archive, header, blockBody));
+          blocks.push({ data: new L2Block(archive, header, blockBody), l1 });
         } else {
           this.log.warn(`Block body not found for block ${header.globalVariables.blockNumber.toBigInt()}.`);
         }
@@ -294,55 +293,56 @@
         } and ${currentL1BlockNumber}.`,
       );
 
-      retrievedBlocks = {
-        lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber,
-        retrievedData: blocks,
-      };
+      retrievedBlocks = blocks;
+      lastProcessedL1BlockNumber =
+        retrievedBlockMetadata.length > 0
+          ? retrievedBlockMetadata[retrievedBlockMetadata.length - 1][2].blockNumber
+          : blocksSynchedTo;
     }
 
     this.log.debug(
-      `Processing retrieved blocks ${retrievedBlocks.retrievedData
-        .map(b => b.number)
-        .join(',')} with last processed L1 block ${retrievedBlocks.lastProcessedL1BlockNumber}`,
+      `Processing retrieved blocks ${retrievedBlocks
+        .map(b => b.data.number)
+        .join(',')} with last processed L1 block ${lastProcessedL1BlockNumber}`,
     );
 
     await Promise.all(
-      retrievedBlocks.retrievedData.map(block => {
-        const noteEncryptedLogs = block.body.noteEncryptedLogs;
-        const encryptedLogs = block.body.encryptedLogs;
-        const unencryptedLogs = block.body.unencryptedLogs;
-        return this.store.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, block.number);
+      retrievedBlocks.map(block => {
+        const noteEncryptedLogs = block.data.body.noteEncryptedLogs;
+        const encryptedLogs = block.data.body.encryptedLogs;
+        const unencryptedLogs = block.data.body.unencryptedLogs;
+        return this.store.addLogs(noteEncryptedLogs, encryptedLogs, unencryptedLogs, block.data.number);
       }),
     );
 
     // Unroll all logs emitted during the retrieved blocks and extract any contract classes and instances from them
     await Promise.all(
-      retrievedBlocks.retrievedData.map(async block => {
-        const blockLogs = block.body.txEffects
+      retrievedBlocks.map(async block => {
+        const blockLogs = block.data.body.txEffects
          .flatMap(txEffect => (txEffect ? [txEffect.unencryptedLogs] : []))
          .flatMap(txLog => txLog.unrollLogs());
-        await this.storeRegisteredContractClasses(blockLogs, block.number);
-        await this.storeDeployedContractInstances(blockLogs, block.number);
-        await this.storeBroadcastedIndividualFunctions(blockLogs, block.number);
+        await this.storeRegisteredContractClasses(blockLogs, block.data.number);
+        await this.storeDeployedContractInstances(blockLogs, block.data.number);
+        await this.storeBroadcastedIndividualFunctions(blockLogs, block.data.number);
       }),
     );
 
-    if (retrievedBlocks.retrievedData.length > 0) {
+    if (retrievedBlocks.length > 0) {
       const timer = new Timer();
       await this.store.addBlocks(retrievedBlocks);
       this.instrumentation.processNewBlocks(
-        timer.ms() / retrievedBlocks.retrievedData.length,
-        retrievedBlocks.retrievedData,
+        timer.ms() / retrievedBlocks.length,
+        retrievedBlocks.map(b => b.data),
       );
-      const lastL2BlockNumber = retrievedBlocks.retrievedData[retrievedBlocks.retrievedData.length - 1].number;
-      this.log.verbose(`Processed ${retrievedBlocks.retrievedData.length} new L2 blocks up to ${lastL2BlockNumber}`);
+      const lastL2BlockNumber = retrievedBlocks[retrievedBlocks.length - 1].data.number;
+      this.log.verbose(`Processed ${retrievedBlocks.length} new L2 blocks up to ${lastL2BlockNumber}`);
     }
 
     // Fetch the logs for proven blocks in the block range and update the last proven block number.
     // Note it's ok to read repeated data here, since we're just using the largest number we see on the logs.
     await this.updateLastProvenL2Block(blocksSynchedTo, currentL1BlockNumber);
 
-    if (retrievedBlocks.retrievedData.length > 0 || blockUntilSynced) {
+    if (retrievedBlocks.length > 0 || blockUntilSynced) {
       (blockUntilSynced ? this.log.info : this.log.verbose)(`Synced to L1 block ${currentL1BlockNumber}`);
     }
   }
@@ -381,24 +381,18 @@ export class Archiver implements ArchiveSource {
       return;
     }
 
-    // Collect L1 block times for all ProofVerified event logs, this is the time in which the proof was submitted.
-    const getL1BlockTime = async (blockNumber: bigint) =>
-      (await this.publicClient.getBlock({ includeTransactions: false, blockNumber })).timestamp;
-
     const l1BlockTimes = new Map(
       await Promise.all(
         unique(logs.map(log => log.l1BlockNumber)).map(
-          async blockNumber => [blockNumber, await getL1BlockTime(blockNumber)] as const,
+          async blockNumber => [blockNumber, await getL1BlockTime(this.publicClient, blockNumber)] as const,
         ),
       ),
     );
 
     // Collect L2 block times for all the blocks verified, this is the time in which the block proven was
-    // originally submitted, according to the block header global variables. If we stop having this info,
-    // we'll have to tweak the archiver store to save the L1 block time of when the block is pushed during
-    // the addBlocks call.
+    // originally submitted to L1, using the L1 timestamp of the transaction.
     const getL2BlockTime = async (blockNumber: bigint) =>
-      (await this.store.getBlocks(Number(blockNumber), 1))[0]?.header.globalVariables.timestamp.toBigInt();
+      (await this.store.getBlocks(Number(blockNumber), 1))[0]?.l1.timestamp;
 
     const l2BlockTimes = new Map(
       await Promise.all(
@@ -521,7 +515,7 @@ export class Archiver implements ArchiveSource {
     const limitWithProven = proven
       ? Math.min(limit, Math.max((await this.store.getProvenL2BlockNumber()) - from + 1, 0))
       : limit;
-    return limitWithProven === 0 ? [] : this.store.getBlocks(from, limitWithProven);
+    return limitWithProven === 0 ? [] : (await this.store.getBlocks(from, limitWithProven)).map(b => b.data);
   }
 
   /**
@@ -535,7 +529,7 @@ export class Archiver implements ArchiveSource {
       number = await this.store.getSynchedL2BlockNumber();
     }
     const blocks = await this.store.getBlocks(number, 1);
-    return blocks.length === 0 ? undefined : blocks[0];
+    return blocks.length === 0 ? undefined : blocks[0].data;
  }
 
   public getTxEffect(txHash: TxHash): Promise<TxEffect | undefined> {
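
Editor's note: the fixture change from `101000n` to `1000n` follows directly from the archiver change above — both ends of the delay are now L1 timestamps (when the block was published to L1 vs. when its proof was verified on L1), instead of comparing against the L2 header timestamp. A minimal sketch of the computation, with the log shape and helper signatures taken from the hunks above and the surrounding wiring assumed:

```ts
// Sketch only: proof delay measured purely with L1 timestamps.
type L2ProofVerified = { l1BlockNumber: bigint; l2BlockNumber: bigint; proverId: string };

async function proofDelay(
  log: L2ProofVerified,
  // L1 block number -> timestamp of the L1 block containing the proof tx
  l1BlockTimes: Map<bigint, bigint>,
  // L2 block number -> L1 timestamp recorded when the block was published
  getL2BlockTime: (blockNumber: bigint) => Promise<bigint | undefined>,
): Promise<bigint> {
  const provenAt = l1BlockTimes.get(log.l1BlockNumber)!;
  const publishedAt = (await getL2BlockTime(log.l2BlockNumber))!;
  return provenAt - publishedAt; // e.g. the fixture's 1000n
}
```
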
diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts
index f95270023299..1d7c6c81afee 100644
--- a/yarn-project/archiver/src/archiver/archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store.ts
@@ -24,7 +24,8 @@ import {
   type UnconstrainedFunctionWithMembershipProof,
 } from '@aztec/types/contracts';
 
-import { type DataRetrieval, type SingletonDataRetrieval } from './data_retrieval.js';
+import { type DataRetrieval, type SingletonDataRetrieval } from './structs/data_retrieval.js';
+import { type L1Published } from './structs/published.js';
 
 /**
  * Represents the latest L1 block processed by the archiver for various objects in L2.
@@ -50,7 +51,7 @@ export interface ArchiverDataStore {
    * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
    */
-  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean>;
+  addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean>;
 
   /**
    * Append new block bodies to the store's list.
@@ -73,7 +74,7 @@ export interface ArchiverDataStore {
    * @param limit - The number of blocks to return.
    * @returns The requested L2 blocks.
    */
-  getBlocks(from: number, limit: number): Promise<L2Block[]>;
+  getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]>;
 
   /**
    * Gets a tx effect.
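
With this interface change, an L2 block and the L1 publication info recorded for it travel together. A hypothetical consumer of any `ArchiverDataStore` implementation (store construction elided, inside an async context) unwraps them like so:

```ts
// Assumes `store` implements the ArchiverDataStore interface above.
const [first] = await store.getBlocks(1, 1); // first: L1Published<L2Block>
const l2Block = first.data; // the block itself
const { blockNumber, blockHash, timestamp } = first.l1; // where/when it landed on L1
console.log(`L2 block ${l2Block.number} was published in L1 block ${blockNumber} (${blockHash}) at ${timestamp}`);
```
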
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
index 6b8315bbd6b5..3c6127e5b16c 100644
--- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
@@ -15,7 +15,8 @@
 } from '@aztec/types/contracts';
 
 import { type ArchiverDataStore, type ArchiverL1SynchPoint } from './archiver_store.js';
-import { type DataRetrieval } from './data_retrieval.js';
+import { type DataRetrieval } from './structs/data_retrieval.js';
+import { type L1Published } from './structs/published.js';
 
 /**
  * @param testName - The name of the test suite.
@@ -24,24 +25,24 @@ import { type DataRetrieval } from './data_retrieval.js';
 export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
   describe(testName, () => {
     let store: ArchiverDataStore;
-    let blocks: DataRetrieval<L2Block>;
+    let blocks: L1Published<L2Block>[];
     let blockBodies: DataRetrieval<Body[]>;
 
-    const blockTests: [number, number, () => L2Block[]][] = [
-      [1, 1, () => blocks.retrievedData.slice(0, 1)],
-      [10, 1, () => blocks.retrievedData.slice(9, 10)],
-      [1, 10, () => blocks.retrievedData.slice(0, 10)],
-      [2, 5, () => blocks.retrievedData.slice(1, 6)],
-      [5, 2, () => blocks.retrievedData.slice(4, 6)],
+    const blockTests: [number, number, () => L1Published<L2Block>[]][] = [
+      [1, 1, () => blocks.slice(0, 1)],
+      [10, 1, () => blocks.slice(9, 10)],
+      [1, 10, () => blocks.slice(0, 10)],
+      [2, 5, () => blocks.slice(1, 6)],
+      [5, 2, () => blocks.slice(4, 6)],
     ];
 
     beforeEach(() => {
       store = getStore();
-      blocks = {
-        lastProcessedL1BlockNumber: 5n,
-        retrievedData: Array.from({ length: 10 }).map((_, i) => L2Block.random(i + 1)),
-      };
+      blocks = times(10, i => ({
+        data: L2Block.random(i + 1),
+        l1: { blockNumber: BigInt(i + 10), blockHash: `0x${i}`, timestamp: BigInt(i * 1000) },
+      }));
 
       blockBodies = {
-        retrievedData: blocks.retrievedData.map(block => block.body),
+        retrievedData: blocks.map(block => block.data.body),
         lastProcessedL1BlockNumber: 4n,
       };
     });
@@ -80,7 +81,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
     });
 
     it('resets `from` to the first block if it is out of range', async () => {
-      await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.retrievedData.slice(0, 1));
+      await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.slice(0, 1));
     });
   });
@@ -91,7 +92,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
 
     it("returns the most recently added block's number", async () => {
       await store.addBlocks(blocks);
-      await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.retrievedData.at(-1)!.number);
+      await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.at(-1)!.data.number);
     });
   });
@@ -108,7 +109,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
     it('returns the L1 block number in which the most recent L2 block was published', async () => {
       await store.addBlocks(blocks);
       await expect(store.getSynchPoint()).resolves.toEqual({
-        blocksSynchedTo: blocks.lastProcessedL1BlockNumber,
+        blocksSynchedTo: 19n,
         messagesSynchedTo: 0n,
         blockBodiesSynchedTo: 0n,
         provenLogsSynchedTo: 0n,
@@ -151,12 +152,13 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
 
   describe('addLogs', () => {
     it('adds encrypted & unencrypted logs', async () => {
+      const block = blocks[0].data;
       await expect(
         store.addLogs(
-          blocks.retrievedData[0].body.noteEncryptedLogs,
-          blocks.retrievedData[0].body.encryptedLogs,
-          blocks.retrievedData[0].body.unencryptedLogs,
-          blocks.retrievedData[0].number,
+          block.body.noteEncryptedLogs,
+          block.body.encryptedLogs,
+          block.body.unencryptedLogs,
+          block.number,
         ),
       ).resolves.toEqual(true);
     });
@@ -169,12 +171,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
   ])('getLogs (%s)', (_, logType) => {
     beforeEach(async () => {
       await Promise.all(
-        blocks.retrievedData.map(block =>
+        blocks.map(block =>
           store.addLogs(
-            block.body.noteEncryptedLogs,
-            block.body.encryptedLogs,
-            block.body.unencryptedLogs,
-            block.number,
+            block.data.body.noteEncryptedLogs,
+            block.data.body.encryptedLogs,
+            block.data.body.unencryptedLogs,
+            block.data.number,
           ),
         ),
       );
@@ -184,12 +186,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
       const expectedLogs = getExpectedBlocks().map(block => {
         switch (logType) {
           case LogType.ENCRYPTED:
-            return block.body.encryptedLogs;
+            return block.data.body.encryptedLogs;
           case LogType.NOTEENCRYPTED:
-            return block.body.noteEncryptedLogs;
+            return block.data.body.noteEncryptedLogs;
           case LogType.UNENCRYPTED:
           default:
-            return block.body.unencryptedLogs;
+            return block.data.body.unencryptedLogs;
         }
       });
       const actualLogs = await store.getLogs(from, limit, logType);
@@ -200,12 +202,12 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
   describe('getTxEffect', () => {
     beforeEach(async () => {
       await Promise.all(
-        blocks.retrievedData.map(block =>
+        blocks.map(block =>
           store.addLogs(
-            block.body.noteEncryptedLogs,
-            block.body.encryptedLogs,
-            block.body.unencryptedLogs,
-            block.number,
+            block.data.body.noteEncryptedLogs,
+            block.data.body.encryptedLogs,
+            block.data.body.unencryptedLogs,
+            block.data.number,
           ),
         ),
       );
       await store.addBlocks(blocks);
     });
@@ -214,11 +216,11 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
     it.each([
-      () => blocks.retrievedData[0].body.txEffects[0],
-      () => blocks.retrievedData[9].body.txEffects[3],
-      () => blocks.retrievedData[3].body.txEffects[1],
-      () => blocks.retrievedData[5].body.txEffects[2],
-      () => blocks.retrievedData[1].body.txEffects[0],
+      () => blocks[0].data.body.txEffects[0],
+      () => blocks[9].data.body.txEffects[3],
+      () => blocks[3].data.body.txEffects[1],
+      () => blocks[5].data.body.txEffects[2],
+      () => blocks[1].data.body.txEffects[0],
     ])('retrieves a previously stored transaction', async getExpectedTx => {
       const expectedTx = getExpectedTx();
       const actualTx = await store.getTxEffect(expectedTx.txHash);
@@ -353,28 +355,24 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
     const numPublicFunctionCalls = 3;
     const numUnencryptedLogs = 2;
     const numBlocks = 10;
-    let blocks: DataRetrieval<L2Block>;
+    let blocks: L1Published<L2Block>[];
 
     beforeEach(async () => {
-      blocks = {
-        lastProcessedL1BlockNumber: 4n,
-        retrievedData: Array(numBlocks)
-          .fill(0)
-          .map((_, index: number) =>
-            L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
-          ),
-      };
+      blocks = times(numBlocks, (index: number) => ({
+        data: L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
+        l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
+      }));
 
       await store.addBlocks(blocks);
       await store.addBlockBodies(blockBodies);
 
       await Promise.all(
-        blocks.retrievedData.map(block =>
+        blocks.map(block =>
           store.addLogs(
-            block.body.noteEncryptedLogs,
-            block.body.encryptedLogs,
-            block.body.unencryptedLogs,
-            block.number,
+            block.data.body.noteEncryptedLogs,
+            block.data.body.encryptedLogs,
+            block.data.body.unencryptedLogs,
+            block.data.number,
          ),
        ),
      );
@@ -384,7 +382,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
       // get random tx
       const targetBlockIndex = randomInt(numBlocks);
       const targetTxIndex = randomInt(txsPerBlock);
-      const targetTxHash = blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].txHash;
+      const targetTxHash = blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].txHash;
 
       const response = await store.getUnencryptedLogs({ txHash: targetTxHash });
       const logs = response.logs;
@@ -428,7 +426,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
       const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
       const targetLogIndex = randomInt(numUnencryptedLogs);
       const targetContractAddress =
-        blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
+        blocks[targetBlockIndex].data.body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
           targetFunctionLogIndex
         ].logs[targetLogIndex].contractAddress;
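
The fixtures above now build the wrapped blocks with `times` from `@aztec/foundation/collection` instead of `Array(...).fill(0).map(...)`. Assuming the usual semantics (`times(n, fn)` returns `[fn(0), …, fn(n - 1)]`), a self-contained equivalent follows; it also explains the `blocksSynchedTo: 19n` assertion, since the last of the ten fixtures gets `blockNumber: BigInt(9 + 10)`:

```ts
// Stand-in with the assumed semantics of `times` from '@aztec/foundation/collection'.
function times<T>(n: number, fn: (i: number) => T): T[] {
  return Array.from({ length: n }, (_, i) => fn(i));
}

// Shape of the suite's fixtures (a string stands in for L2Block.random(i + 1)):
const blocks = times(10, i => ({
  data: `block-${i + 1}`,
  l1: { blockNumber: BigInt(i + 10), blockHash: `0x${i}`, timestamp: BigInt(i * 1000) },
}));

console.log(blocks.at(-1)!.l1.blockNumber); // 19n -> the expected blocksSynchedTo
```
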
diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts
index 2d844409b4d0..dd62d60f55c8 100644
--- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts
+++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts
@@ -16,6 +16,8 @@ import {
   slice,
 } from 'viem';
 
+import { type L1PublishedData } from './structs/published.js';
+
 /**
  * Processes newly received MessageSent (L1 to L2) logs.
  * @param logs - MessageSent logs.
@@ -43,8 +45,8 @@ export async function processL2BlockProcessedLogs(
   publicClient: PublicClient,
   expectedL2BlockNumber: bigint,
   logs: Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProcessed'>[],
-): Promise<[Header, AppendOnlyTreeSnapshot][]> {
-  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
+): Promise<[Header, AppendOnlyTreeSnapshot, L1PublishedData][]> {
+  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, L1PublishedData][] = [];
   for (const log of logs) {
     const blockNum = log.args.blockNumber;
     if (blockNum !== expectedL2BlockNumber) {
@@ -57,13 +59,24 @@
       log.args.blockNumber,
     );
 
-    retrievedBlockMetadata.push([header, archive]);
+    const l1: L1PublishedData = {
+      blockNumber: log.blockNumber,
+      blockHash: log.blockHash,
+      timestamp: await getL1BlockTime(publicClient, log.blockNumber),
+    };
+
+    retrievedBlockMetadata.push([header, archive, l1]);
     expectedL2BlockNumber++;
   }
 
   return retrievedBlockMetadata;
 }
 
+export async function getL1BlockTime(publicClient: PublicClient, blockNumber: bigint): Promise<bigint> {
+  const block = await publicClient.getBlock({ blockNumber, includeTransactions: false });
+  return block.timestamp;
+}
+
 export async function processTxsPublishedLogs(
   publicClient: PublicClient,
   logs: Log[],
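
`getL1BlockTime` is now an exported helper, so the archiver and the log handlers share one implementation. A standalone usage sketch — the chain and RPC endpoint below are placeholders, not part of the change:

```ts
import { createPublicClient, http } from 'viem';
import { foundry } from 'viem/chains';

import { getL1BlockTime } from './eth_log_handlers.js';

// Hypothetical local node; swap in whatever L1 endpoint the archiver is configured with.
const publicClient = createPublicClient({ chain: foundry, transport: http('http://localhost:8545') });

// Timestamp (seconds, as bigint) of L1 block 102, e.g. the block in which a proof was verified.
const timestamp = await getL1BlockTime(publicClient, 102n);
```
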
diff --git a/yarn-project/archiver/src/archiver/index.ts b/yarn-project/archiver/src/archiver/index.ts
index 81aa8727e172..6d1c72a21baf 100644
--- a/yarn-project/archiver/src/archiver/index.ts
+++ b/yarn-project/archiver/src/archiver/index.ts
@@ -1,5 +1,6 @@
 export * from './archiver.js';
 export * from './config.js';
+export { type L1Published, type L1PublishedData } from './structs/published.js';
 export { MemoryArchiverStore } from './memory_archiver_store/memory_archiver_store.js';
 export { ArchiverDataStore } from './archiver_store.js';
 export { KVArchiverDataStore } from './kv_archiver_store/kv_archiver_store.js';
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
index 006e389b2678..9f1c5b3ac642 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_body_store.ts
@@ -2,7 +2,7 @@ import { Body } from '@aztec/circuit-types';
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store';
 
-import { type DataRetrieval } from '../data_retrieval.js';
+import { type DataRetrieval } from '../structs/data_retrieval.js';
 
 export class BlockBodyStore {
   /** Map block body hash to block body */
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
index 5cf2340d4465..2fb206487b6b 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
@@ -3,7 +3,7 @@ import { AppendOnlyTreeSnapshot, type AztecAddress, Header, INITIAL_L2_BLOCK_NUM
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type AztecKVStore, type AztecMap, type AztecSingleton, type Range } from '@aztec/kv-store';
 
-import { type DataRetrieval } from '../data_retrieval.js';
+import { type L1Published, type L1PublishedData } from '../structs/published.js';
 import { type BlockBodyStore } from './block_body_store.js';
 
 type BlockIndexValue = [blockNumber: number, index: number];
@@ -11,6 +11,7 @@ type BlockIndexValue = [blockNumber: number, index: number];
 type BlockStorage = {
   header: Buffer;
   archive: Buffer;
+  l1: L1PublishedData;
 };
 
 /**
@@ -22,9 +23,6 @@ export class BlockStore {
   /** Stores L1 block number in which the last processed L2 block was included */
   #lastSynchedL1Block: AztecSingleton<bigint>;
 
-  /** Stores last proven L2 block number */
-  #lastProvenL2Block: AztecSingleton<number>;
-
   /** Index mapping transaction hash (as a string) to its location in a block */
   #txIndex: AztecMap<string, BlockIndexValue>;
@@ -42,28 +40,32 @@ export class BlockStore {
     this.#txIndex = db.openMap('archiver_tx_index');
     this.#contractIndex = db.openMap('archiver_contract_index');
     this.#lastSynchedL1Block = db.openSingleton('archiver_last_synched_l1_block');
-    this.#lastProvenL2Block = db.openSingleton('archiver_last_proven_l2_block');
   }
 
   /**
    * Append new blocks to the store's list.
-   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
+   * @param blocks - The L2 blocks to be added to the store.
    * @returns True if the operation is successful.
    */
-  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
+  addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
+    if (blocks.length === 0) {
+      return Promise.resolve(true);
+    }
+
     return this.db.transaction(() => {
-      for (const block of blocks.retrievedData) {
-        void this.#blocks.set(block.number, {
-          header: block.header.toBuffer(),
-          archive: block.archive.toBuffer(),
+      for (const block of blocks) {
+        void this.#blocks.set(block.data.number, {
+          header: block.data.header.toBuffer(),
+          archive: block.data.archive.toBuffer(),
+          l1: block.l1,
         });
 
-        block.body.txEffects.forEach((tx, i) => {
-          void this.#txIndex.set(tx.txHash.toString(), [block.number, i]);
+        block.data.body.txEffects.forEach((tx, i) => {
+          void this.#txIndex.set(tx.txHash.toString(), [block.data.number, i]);
         });
       }
 
-      void this.#lastSynchedL1Block.set(blocks.lastProcessedL1BlockNumber);
+      void this.#lastSynchedL1Block.set(blocks[blocks.length - 1].l1.blockNumber);
 
       return true;
     });
@@ -75,7 +77,7 @@ export class BlockStore {
    * @param limit - The number of blocks to return.
    * @returns The requested L2 blocks
    */
-  *getBlocks(start: number, limit: number): IterableIterator<L2Block> {
+  *getBlocks(start: number, limit: number): IterableIterator<L1Published<L2Block>> {
     for (const blockStorage of this.#blocks.values(this.#computeBlockRange(start, limit))) {
       yield this.getBlockFromBlockStorage(blockStorage);
     }
@@ -86,7 +88,7 @@ export class BlockStore {
    * @param blockNumber - The number of the block to return.
    * @returns The requested L2 block.
    */
-  getBlock(blockNumber: number): L2Block | undefined {
+  getBlock(blockNumber: number): L1Published<L2Block> | undefined {
     const blockStorage = this.#blocks.get(blockNumber);
     if (!blockStorage || !blockStorage.header) {
       return undefined;
@@ -104,11 +106,8 @@ export class BlockStore {
       throw new Error('Body is not able to be retrieved from BodyStore');
     }
 
-    return L2Block.fromFields({
-      header,
-      archive,
-      body,
-    });
+    const l2Block = L2Block.fromFields({ header, archive, body });
+    return { data: l2Block, l1: blockStorage.l1 };
   }
 
   /**
@@ -123,7 +122,7 @@ export class BlockStore {
     }
 
     const block = this.getBlock(blockNumber);
-    return block?.body.txEffects[txIndex];
+    return block?.data.body.txEffects[txIndex];
   }
 
   /**
@@ -138,15 +137,15 @@ export class BlockStore {
     }
 
     const block = this.getBlock(blockNumber)!;
-    const tx = block.body.txEffects[txIndex];
+    const tx = block.data.body.txEffects[txIndex];
 
     return new TxReceipt(
       txHash,
       TxReceipt.statusFromRevertCode(tx.revertCode),
       '',
       tx.transactionFee.toBigInt(),
-      block.hash().toBuffer(),
-      block.number,
+      block.data.hash().toBuffer(),
+      block.data.number,
     );
   }
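
Two behavioral notes on `addBlocks` above: an empty batch now returns early so the previous sync point is preserved, and the L1 sync point is derived from the last element's `l1.blockNumber` (batches arrive in ascending L1 order) instead of a separate `lastProcessedL1BlockNumber` field. A reduced sketch of just that logic, with the KV plumbing elided:

```ts
type L1PublishedData = { blockNumber: bigint; timestamp: bigint; blockHash: string };
type L1Published<T> = { data: T; l1: L1PublishedData };

let lastSynchedL1Block: bigint | undefined; // stand-in for the AztecSingleton

function recordSyncPoint<T>(blocks: L1Published<T>[]): boolean {
  if (blocks.length === 0) {
    return true; // nothing new: keep the previous sync point untouched
  }
  // The batch is ordered by L1 block, so the last entry carries the highest L1 block number.
  lastSynchedL1Block = blocks[blocks.length - 1].l1.blockNumber;
  return true;
}
```
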
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
index 71daa8bd5549..7a71f7861ea7 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
@@ -27,7 +27,8 @@
 } from '@aztec/types/contracts';
 
 import { type ArchiverDataStore, type ArchiverL1SynchPoint } from '../archiver_store.js';
-import { type DataRetrieval, type SingletonDataRetrieval } from '../data_retrieval.js';
+import { type DataRetrieval, type SingletonDataRetrieval } from '../structs/data_retrieval.js';
+import { type L1Published } from '../structs/published.js';
 import { BlockBodyStore } from './block_body_store.js';
 import { BlockStore } from './block_store.js';
 import { ContractArtifactsStore } from './contract_artifacts_store.js';
@@ -123,7 +124,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
    * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
    */
-  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
+  addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
     return this.#blockStore.addBlocks(blocks);
   }
 
@@ -134,7 +135,7 @@ export class KVArchiverDataStore implements ArchiverDataStore {
    * @param limit - The number of blocks to return.
    * @returns The requested L2 blocks
    */
-  getBlocks(start: number, limit: number): Promise<L2Block[]> {
+  getBlocks(start: number, limit: number): Promise<L1Published<L2Block>[]> {
     try {
       return Promise.resolve(Array.from(this.#blockStore.getBlocks(start, limit)));
     } catch (err) {
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts
index 8b6023ad65c3..0d412b4b70ed 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/message_store.ts
@@ -8,7 +8,7 @@ import {
 } from '@aztec/circuit-types';
 import { createDebugLogger } from '@aztec/foundation/log';
 import { type AztecKVStore, type AztecMap, type AztecSingleton } from '@aztec/kv-store';
 
-import { type DataRetrieval } from '../data_retrieval.js';
+import { type DataRetrieval } from '../structs/data_retrieval.js';
 
 /**
  * LMDB implementation of the ArchiverDataStore interface.
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/proven_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/proven_store.ts
index f300be4aae5a..2009ce806276 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/proven_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/proven_store.ts
@@ -1,6 +1,6 @@
 import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store';
 
-import { type SingletonDataRetrieval } from '../data_retrieval.js';
+import { type SingletonDataRetrieval } from '../structs/data_retrieval.js';
 
 export class ProvenStore {
   /** Stores L1 block number in which the last processed L2 block was included */
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
index 51f93c71c86d..de8237cf336b 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
@@ -1,4 +1,5 @@
 import { L2Block } from '@aztec/circuit-types';
+import { times } from '@aztec/foundation/collection';
 
 import { type ArchiverDataStore } from '../archiver_store.js';
 import { describeArchiverDataStore } from '../archiver_store_test_suite.js';
@@ -17,21 +18,19 @@ describe('MemoryArchiverStore', () => {
   it('does not return more than "maxLogs" logs', async () => {
     const maxLogs = 5;
     archiverStore = new MemoryArchiverStore(maxLogs);
-    const blocks = {
-      lastProcessedL1BlockNumber: 3n,
-      retrievedData: Array(10)
-        .fill(0)
-        .map((_, index: number) => L2Block.random(index + 1, 4, 2, 3, 2, 2)),
-    };
+    const blocks = times(10, (index: number) => ({
+      data: L2Block.random(index + 1, 4, 2, 3, 2, 2),
+      l1: { blockNumber: BigInt(index), blockHash: `0x${index}`, timestamp: BigInt(index) },
+    }));
 
     await archiverStore.addBlocks(blocks);
     await Promise.all(
-      blocks.retrievedData.map(block =>
+      blocks.map(block =>
        archiverStore.addLogs(
-          block.body.noteEncryptedLogs,
-          block.body.encryptedLogs,
-          block.body.unencryptedLogs,
-          block.number,
+          block.data.body.noteEncryptedLogs,
+          block.data.body.encryptedLogs,
+          block.data.body.unencryptedLogs,
+          block.data.number,
        ),
      ),
    );
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index ead3905272dd..27e69b174e3f 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -27,7 +27,8 @@
 } from '@aztec/types/contracts';
 
 import { type ArchiverDataStore, type ArchiverL1SynchPoint } from '../archiver_store.js';
-import { type DataRetrieval, type SingletonDataRetrieval } from '../data_retrieval.js';
+import { type DataRetrieval, type SingletonDataRetrieval } from '../structs/data_retrieval.js';
+import { type L1Published } from '../structs/published.js';
 import { L1ToL2MessageStore } from './l1_to_l2_message_store.js';
 
 /**
@@ -37,7 +38,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   /**
    * An array containing all the L2 blocks that have been fetched so far.
    */
-  private l2Blocks: L2Block[] = [];
+  private l2Blocks: L1Published<L2Block>[] = [];
 
   /**
    * A mapping of body hash to body
@@ -154,10 +155,14 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
    */
-  public addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
-    this.lastL1BlockNewBlocks = blocks.lastProcessedL1BlockNumber;
-    this.l2Blocks.push(...blocks.retrievedData);
-    this.txEffects.push(...blocks.retrievedData.flatMap(b => b.body.txEffects));
+  public addBlocks(blocks: L1Published<L2Block>[]): Promise<boolean> {
+    if (blocks.length === 0) {
+      return Promise.resolve(true);
+    }
+
+    this.lastL1BlockNewBlocks = blocks[blocks.length - 1].l1.blockNumber;
+    this.l2Blocks.push(...blocks);
+    this.txEffects.push(...blocks.flatMap(b => b.data.body.txEffects));
 
     return Promise.resolve(true);
   }
@@ -248,7 +253,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    * @returns The requested L2 blocks.
    * @remarks When "from" is smaller than genesis block number, blocks from the beginning are returned.
    */
-  public getBlocks(from: number, limit: number): Promise<L2Block[]> {
+  public getBlocks(from: number, limit: number): Promise<L1Published<L2Block>[]> {
     // Return an empty array if we are outside of range
     if (limit < 1) {
       return Promise.reject(new Error(`Invalid limit: ${limit}`));
@@ -280,7 +285,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
    */
   public getSettledTxReceipt(txHash: TxHash): Promise<TxReceipt | undefined> {
     for (const block of this.l2Blocks) {
-      for (const txEffect of block.body.txEffects) {
+      for (const txEffect of block.data.body.txEffects) {
         if (txEffect.txHash.equals(txHash)) {
           return Promise.resolve(
             new TxReceipt(
@@ -288,8 +293,8 @@ export class MemoryArchiverStore implements ArchiverDataStore {
               TxReceipt.statusFromRevertCode(txEffect.revertCode),
               '',
               txEffect.transactionFee.toBigInt(),
-              block.hash().toBuffer(),
-              block.number,
+              block.data.hash().toBuffer(),
+              block.data.number,
             ),
           );
         }
@@ -399,10 +404,10 @@ export class MemoryArchiverStore implements ArchiverDataStore {
         for (; logIndexInTx < txLogs.length; logIndexInTx++) {
           const log = txLogs[logIndexInTx];
           if (
-            (!txHash || block.body.txEffects[txIndexInBlock].txHash.equals(txHash)) &&
+            (!txHash || block.data.body.txEffects[txIndexInBlock].txHash.equals(txHash)) &&
             (!contractAddress || log.contractAddress.equals(contractAddress))
           ) {
-            logs.push(new ExtendedUnencryptedL2Log(new LogId(block.number, txIndexInBlock, logIndexInTx), log));
+            logs.push(new ExtendedUnencryptedL2Log(new LogId(block.data.number, txIndexInBlock, logIndexInTx), log));
             if (logs.length === this.maxLogs) {
               return Promise.resolve({
                 logs,
@@ -430,7 +435,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
     if (this.l2Blocks.length === 0) {
       return Promise.resolve(INITIAL_L2_BLOCK_NUM - 1);
     }
-    return Promise.resolve(this.l2Blocks[this.l2Blocks.length - 1].number);
+    return Promise.resolve(this.l2Blocks[this.l2Blocks.length - 1].data.number);
   }
 
   public getProvenL2BlockNumber(): Promise<number> {
diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/structs/data_retrieval.ts
similarity index 96%
rename from yarn-project/archiver/src/archiver/data_retrieval.ts
rename to yarn-project/archiver/src/archiver/structs/data_retrieval.ts
index 60e6b41bf5e7..1d86645decd6 100644
--- a/yarn-project/archiver/src/archiver/data_retrieval.ts
+++ b/yarn-project/archiver/src/archiver/structs/data_retrieval.ts
@@ -13,7 +13,8 @@
 import {
   processL2BlockProcessedLogs,
   processMessageSentLogs,
   processTxsPublishedLogs,
-} from './eth_log_handlers.js';
+} from '../eth_log_handlers.js';
+import { type L1PublishedData } from './published.js';
 
 /**
  * Data retrieved from logs
@@ -61,8 +62,8 @@ export async function retrieveBlockMetadataFromRollup(
   searchEndBlock: bigint,
   expectedNextL2BlockNum: bigint,
   logger: DebugLogger = createDebugLogger('aztec:archiver'),
-): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot]>> {
-  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
+): Promise<[Header, AppendOnlyTreeSnapshot, L1PublishedData][]> {
+  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, L1PublishedData][] = [];
   do {
     if (searchStartBlock > searchEndBlock) {
       break;
     }
@@ -91,7 +92,7 @@ export async function retrieveBlockMetadataFromRollup(
     searchStartBlock = lastLog.blockNumber! + 1n;
     expectedNextL2BlockNum += BigInt(newBlockMetadata.length);
   } while (blockUntilSynced && searchStartBlock <= searchEndBlock);
-  return { lastProcessedL1BlockNumber: searchStartBlock - 1n, retrievedData: retrievedBlockMetadata };
+  return retrievedBlockMetadata;
 }
 
 /**
diff --git a/yarn-project/archiver/src/archiver/structs/published.ts b/yarn-project/archiver/src/archiver/structs/published.ts
new file mode 100644
index 000000000000..12a469c1715b
--- /dev/null
+++ b/yarn-project/archiver/src/archiver/structs/published.ts
@@ -0,0 +1,11 @@
+/** Extends a type with L1 published info (block number, hash, and timestamp) */
+export type L1Published<T> = {
+  data: T;
+  l1: L1PublishedData;
+};
+
+export type L1PublishedData = {
+  blockNumber: bigint;
+  timestamp: bigint;
+  blockHash: string;
+};
diff --git a/yarn-project/archiver/src/index.ts b/yarn-project/archiver/src/index.ts
index 523565aea4e4..97aa8b93caf6 100644
--- a/yarn-project/archiver/src/index.ts
+++ b/yarn-project/archiver/src/index.ts
@@ -14,7 +14,7 @@ export * from './rpc/index.js';
 export * from './factory.js';
 
 // We are not storing the info from these events in the archiver for now (and we don't really need to), so we expose this query directly
-export { retrieveL2ProofVerifiedEvents } from './archiver/data_retrieval.js';
+export { retrieveL2ProofVerifiedEvents } from './archiver/structs/data_retrieval.js';
 
 const log = createDebugLogger('aztec:archiver');
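
Because `L1Published` and `L1PublishedData` are now exported from the archiver's index, downstream code can type against the wrapper directly. A hypothetical consumer — the `@aztec/archiver` import path assumes the package index re-exports `archiver/index.js`, and the L1 values are illustrative:

```ts
import { type L1Published } from '@aztec/archiver';
import { L2Block } from '@aztec/circuit-types';

// A random block wrapped with made-up L1 publication data, for illustration only.
const published: L1Published<L2Block> = {
  data: L2Block.random(1),
  l1: { blockNumber: 102n, blockHash: '0xdeadbeef', timestamp: 1715000000n },
};

console.log(`L2 block ${published.data.number} landed on L1 at ${published.l1.timestamp}`);
```
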