From fee8bd3533b74d32e9d5a4d09b93788562eb7000 Mon Sep 17 00:00:00 2001
From: benesjan
Date: Mon, 18 Mar 2024 09:33:45 +0000
Subject: [PATCH] refactor: nuking L1 block number from L2 block

---
 .../archiver/src/archiver/archiver.ts          | 75 +++++++--------
 .../archiver/src/archiver/archiver_store.ts    |  4 +-
 .../src/archiver/archiver_store_test_suite.ts  | 91 +++++++++++--------
 .../archiver/src/archiver/data_retrieval.ts    |  6 +-
 .../archiver/src/archiver/eth_log_handlers.ts  |  8 +-
 .../archiver/kv_archiver_store/block_store.ts  | 23 +++--
 .../kv_archiver_store/kv_archiver_store.ts     |  4 +-
 .../memory_archiver_store.test.ts              | 13 ++-
 .../memory_archiver_store.ts                   | 19 ++--
 yarn-project/circuit-types/src/l2_block.ts     | 61 +++----------
 10 files changed, 145 insertions(+), 159 deletions(-)

diff --git a/yarn-project/archiver/src/archiver/archiver.ts b/yarn-project/archiver/src/archiver/archiver.ts
index 395c227343e1..ac687a150bb3 100644
--- a/yarn-project/archiver/src/archiver/archiver.ts
+++ b/yarn-project/archiver/src/archiver/archiver.ts
@@ -33,6 +33,7 @@ import { Chain, HttpTransport, PublicClient, createPublicClient, http } from 'viem';
 import { ArchiverDataStore } from './archiver_store.js';
 import { ArchiverConfig } from './config.js';
 import {
+  DataRetrieval,
   retrieveBlockBodiesFromAvailabilityOracle,
   retrieveBlockMetadataFromRollup,
   retrieveL1ToL2Messages,
@@ -214,50 +215,50 @@ export class Archiver implements ArchiveSource {
     );
 
     const blockBodies = retrievedBlockBodies.retrievedData.map(([blockBody]) => blockBody);
-
     await this.store.addBlockBodies(blockBodies);
 
-    const retrievedBlockMetadata = await retrieveBlockMetadataFromRollup(
-      this.publicClient,
-      this.rollupAddress,
-      blockUntilSynced,
-      lastL1Blocks.blocks + 1n,
-      currentL1BlockNumber,
-      nextExpectedL2BlockNum,
-    );
-
-    const retrievedBodyHashes = retrievedBlockMetadata.retrievedData.map(
-      ([header]) => header.contentCommitment.txsEffectsHash,
-    );
+    // Now that we have block bodies, we will retrieve block metadata and build L2 blocks from the bodies and
+    // the metadata
+    let retrievedBlocks: DataRetrieval<L2Block>;
+    {
+      const retrievedBlockMetadata = await retrieveBlockMetadataFromRollup(
+        this.publicClient,
+        this.rollupAddress,
+        blockUntilSynced,
+        lastL1Blocks.blocks + 1n,
+        currentL1BlockNumber,
+        nextExpectedL2BlockNum,
+      );
 
-    const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);
+      const retrievedBodyHashes = retrievedBlockMetadata.retrievedData.map(
+        ([header]) => header.contentCommitment.txsEffectsHash,
+      );
 
-    if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
-      throw new Error('Block headers length does not equal block bodies length');
-    }
+      const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);
 
-    const retrievedBlocks = {
-      retrievedData: retrievedBlockMetadata.retrievedData.map(
-        (blockMetadata, i) =>
-          new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i], blockMetadata[2]),
-      ),
-    };
+      if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
+        throw new Error('Block headers length does not equal block bodies length');
+      }
 
-    if (retrievedBlocks.retrievedData.length === 0) {
-      return;
-    } else {
-      this.log(
-        `Retrieved ${retrievedBlocks.retrievedData.length} new L2 blocks between L1 blocks ${
-          lastL1Blocks.blocks + 1n
-        } and ${currentL1BlockNumber}.`,
+      const blocks = retrievedBlockMetadata.retrievedData.map(
+        (blockMetadata, i) => new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i]),
       );
-    }
 
-    // create the block number -> block hash mapping to ensure we retrieve the appropriate events
-    const blockNumberToBodyHash: { [key: number]: Buffer | undefined } = {};
-    retrievedBlocks.retrievedData.forEach((block: L2Block) => {
-      blockNumberToBodyHash[block.number] = block.header.contentCommitment.txsEffectsHash;
-    });
+      if (blocks.length === 0) {
+        return;
+      } else {
+        this.log(
+          `Retrieved ${blocks.length} new L2 blocks between L1 blocks ${
+            lastL1Blocks.blocks + 1n
+          } and ${currentL1BlockNumber}.`,
+        );
+      }
+
+      retrievedBlocks = {
+        lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber,
+        retrievedData: blocks,
+      };
+    }
 
     await Promise.all(
       retrievedBlocks.retrievedData.map(block => {
@@ -280,7 +281,7 @@ export class Archiver implements ArchiveSource {
       }),
     );
 
-    await this.store.addBlocks(retrievedBlocks.retrievedData);
+    await this.store.addBlocks(retrievedBlocks);
   }
 
   /**
diff --git a/yarn-project/archiver/src/archiver/archiver_store.ts b/yarn-project/archiver/src/archiver/archiver_store.ts
index 30876c75e9bd..9569a12bb36e 100644
--- a/yarn-project/archiver/src/archiver/archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store.ts
@@ -31,10 +31,10 @@ export type ArchiverL1SynchPoint = {
 export interface ArchiverDataStore {
   /**
    * Append new blocks to the store's list.
-   * @param blocks - The L2 blocks to be added to the store.
+   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
    */
-  addBlocks(blocks: L2Block[]): Promise<boolean>;
+  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean>;
 
   /**
    * Append new block bodies to the store's list.
diff --git a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
index 2bb4badb706e..0123bc06a6fc 100644
--- a/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
+++ b/yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
@@ -6,6 +6,7 @@ import { randomBytes, randomInt } from '@aztec/foundation/crypto';
 import { ContractClassPublic, ContractInstanceWithAddress, SerializableContractInstance } from '@aztec/types/contracts';
 
 import { ArchiverDataStore } from './archiver_store.js';
+import { DataRetrieval } from './data_retrieval.js';
 
 /**
  * @param testName - The name of the test suite.
@@ -14,27 +15,26 @@ import { ArchiverDataStore } from './archiver_store.js';
 export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
   describe(testName, () => {
     let store: ArchiverDataStore;
-    let blocks: L2Block[];
+    let blocks: DataRetrieval<L2Block>;
 
     const blockTests: [number, number, () => L2Block[]][] = [
-      [1, 1, () => blocks.slice(0, 1)],
-      [10, 1, () => blocks.slice(9, 10)],
-      [1, 10, () => blocks.slice(0, 10)],
-      [2, 5, () => blocks.slice(1, 6)],
-      [5, 2, () => blocks.slice(4, 6)],
+      [1, 1, () => blocks.retrievedData.slice(0, 1)],
+      [10, 1, () => blocks.retrievedData.slice(9, 10)],
+      [1, 10, () => blocks.retrievedData.slice(0, 10)],
+      [2, 5, () => blocks.retrievedData.slice(1, 6)],
+      [5, 2, () => blocks.retrievedData.slice(4, 6)],
     ];
 
     beforeEach(() => {
       store = getStore();
-      blocks = Array.from({ length: 10 }).map((_, i) => {
-        const block = L2Block.random(i + 1);
-        block.setL1BlockNumber(BigInt(i + 1));
-        return block;
-      });
+      blocks = {
+        lastProcessedL1BlockNumber: 5n,
+        retrievedData: Array.from({ length: 10 }).map((_, i) => L2Block.random(i + 1)),
+      };
     });
 
     describe('addBlocks', () => {
       it('returns success when adding block bodies', async () => {
-        await expect(store.addBlockBodies(blocks.map(block => block.body))).resolves.toBe(true);
+        await expect(store.addBlockBodies(blocks.retrievedData.map(block => block.body))).resolves.toBe(true);
       });
 
       it('returns success when adding blocks', async () => {
@@ -50,7 +50,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
     describe('getBlocks', () => {
       beforeEach(async () => {
         await store.addBlocks(blocks);
-        await store.addBlockBodies(blocks.map(block => block.body));
+        await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
       });
 
       it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => {
@@ -66,7 +66,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
       });
 
       it('resets `from` to the first block if it is out of range', async () => {
-        await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.slice(0, 1));
+        await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.retrievedData.slice(0, 1));
       });
     });
 
@@ -77,7 +77,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
 
       it("returns the most recently added block's number", async () => {
         await store.addBlocks(blocks);
-        await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.at(-1)!.number);
+        await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.retrievedData.at(-1)!.number);
       });
     });
 
@@ -92,7 +92,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
       it('returns the L1 block number in which the most recent L2 block was published', async () => {
         await store.addBlocks(blocks);
         await expect(store.getSynchedL1BlockNumbers()).resolves.toEqual({
-          blocks: blocks.at(-1)!.getL1BlockNumber(),
+          blocks: blocks.lastProcessedL1BlockNumber,
           messages: 0n,
         });
       });
@@ -109,7 +109,11 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
     describe('addLogs', () => {
       it('adds encrypted & unencrypted logs', async () => {
         await expect(
-          store.addLogs(blocks[0].body.encryptedLogs, blocks[0].body.unencryptedLogs, blocks[0].number),
+          store.addLogs(
+            blocks.retrievedData[0].body.encryptedLogs,
+            blocks.retrievedData[0].body.unencryptedLogs,
+            blocks.retrievedData[0].number,
+          ),
         ).resolves.toEqual(true);
       });
     });
 
@@ -120,7 +124,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
     ])('getLogs (%s)', (_, logType) => {
       beforeEach(async () => {
         await Promise.all(
-          blocks.map(block => store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number)),
+          blocks.retrievedData.map(block =>
+            store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
+          ),
         );
       });
 
@@ -136,18 +142,20 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
     describe('getTxEffect', () => {
       beforeEach(async () => {
         await Promise.all(
-          blocks.map(block => store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number)),
+          blocks.retrievedData.map(block =>
+            store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
+          ),
         );
         await store.addBlocks(blocks);
-        await store.addBlockBodies(blocks.map(block => block.body));
+        await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
       });
 
       it.each([
-        () => blocks[0].getTx(0),
-        () => blocks[9].getTx(3),
-        () => blocks[3].getTx(1),
-        () => blocks[5].getTx(2),
-        () => blocks[1].getTx(0),
+        () => blocks.retrievedData[0].getTx(0),
+        () => blocks.retrievedData[9].getTx(3),
+        () => blocks.retrievedData[3].getTx(1),
+        () => blocks.retrievedData[5].getTx(2),
+        () => blocks.retrievedData[1].getTx(0),
       ])('retrieves a previously stored transaction', async getExpectedTx => {
         const expectedTx = getExpectedTx();
         const actualTx = await store.getTxEffect(expectedTx.txHash);
@@ -238,20 +246,25 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
     const numPublicFunctionCalls = 3;
     const numUnencryptedLogs = 4;
     const numBlocks = 10;
-    let blocks: L2Block[];
+    let blocks: DataRetrieval<L2Block>;
 
     beforeEach(async () => {
-      blocks = Array(numBlocks)
-        .fill(0)
-        .map((_, index: number) =>
-          L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
-        );
+      blocks = {
+        lastProcessedL1BlockNumber: 4n,
+        retrievedData: Array(numBlocks)
+          .fill(0)
+          .map((_, index: number) =>
+            L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
+          ),
+      };
 
       await store.addBlocks(blocks);
-      await store.addBlockBodies(blocks.map(block => block.body));
+      await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
 
       await Promise.all(
-        blocks.map(block => store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number)),
+        blocks.retrievedData.map(block =>
+          store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
+        ),
       );
     });
 
@@ -259,7 +272,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
       // get random tx
      const targetBlockIndex = randomInt(numBlocks);
       const targetTxIndex = randomInt(txsPerBlock);
-      const targetTxHash = new L2BlockContext(blocks[targetBlockIndex]).getTxHash(targetTxIndex);
+      const targetTxHash = new L2BlockContext(blocks.retrievedData[targetBlockIndex]).getTxHash(targetTxIndex);
 
       const response = await store.getUnencryptedLogs({ txHash: targetTxHash });
       const logs = response.logs;
@@ -303,8 +316,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
       const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
       const targetLogIndex = randomInt(numUnencryptedLogs);
       const targetContractAddress = UnencryptedL2Log.fromBuffer(
-        blocks[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[targetFunctionLogIndex]
-          .logs[targetLogIndex],
+        blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
+          targetFunctionLogIndex
+        ].logs[targetLogIndex],
       ).contractAddress;
 
       const response = await store.getUnencryptedLogs({ contractAddress: targetContractAddress });
@@ -323,8 +337,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
       const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
       const targetLogIndex = randomInt(numUnencryptedLogs);
       const targetSelector = UnencryptedL2Log.fromBuffer(
-        blocks[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[targetFunctionLogIndex]
-          .logs[targetLogIndex],
+        blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
+          targetFunctionLogIndex
+        ].logs[targetLogIndex],
       ).selector;
 
       const response = await store.getUnencryptedLogs({ selector: targetSelector });
diff --git a/yarn-project/archiver/src/archiver/data_retrieval.ts b/yarn-project/archiver/src/archiver/data_retrieval.ts
index 25d4f90be2df..a25b2a8abe07 100644
--- a/yarn-project/archiver/src/archiver/data_retrieval.ts
+++ b/yarn-project/archiver/src/archiver/data_retrieval.ts
@@ -35,7 +35,7 @@ type DataRetrieval<T> = {
  * @param searchStartBlock - The block number to use for starting the search.
  * @param searchEndBlock - The highest block number that we should search up to.
  * @param expectedNextL2BlockNum - The next L2 block number that we expect to find.
- * @returns An array of tuples representing block metadata including the header, archive tree snapshot, and associated l1 block number; as well as the next eth block to search from.
+ * @returns An array of tuples representing block metadata including the header and archive tree snapshot, as well as the next eth block to search from.
  */
 export async function retrieveBlockMetadataFromRollup(
   publicClient: PublicClient,
@@ -44,8 +44,8 @@ export async function retrieveBlockMetadataFromRollup(
   searchStartBlock: bigint,
   searchEndBlock: bigint,
   expectedNextL2BlockNum: bigint,
-): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot, bigint]>> {
-  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, bigint][] = [];
+): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot]>> {
+  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
   do {
     if (searchStartBlock > searchEndBlock) {
       break;
diff --git a/yarn-project/archiver/src/archiver/eth_log_handlers.ts b/yarn-project/archiver/src/archiver/eth_log_handlers.ts
index b20b770973bf..62404e9d5046 100644
--- a/yarn-project/archiver/src/archiver/eth_log_handlers.ts
+++ b/yarn-project/archiver/src/archiver/eth_log_handlers.ts
@@ -28,14 +28,14 @@ export function processLeafInsertedLogs(
  * @param publicClient - The viem public client to use for transaction retrieval.
  * @param expectedL2BlockNumber - The next expected L2 block number.
  * @param logs - L2BlockProcessed logs.
- * @returns - An array of tuples representing block metadata including the header, archive tree snapshot, and associated l1 block number.
+ * @returns - An array of tuples representing block metadata including the header and archive tree snapshot.
 */
 export async function processL2BlockProcessedLogs(
   publicClient: PublicClient,
   expectedL2BlockNumber: bigint,
   logs: Log[],
-): Promise<[Header, AppendOnlyTreeSnapshot, bigint][]> {
-  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, bigint][] = [];
+): Promise<[Header, AppendOnlyTreeSnapshot][]> {
+  const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
   for (const log of logs) {
     const blockNum = log.args.blockNumber;
     if (blockNum !== expectedL2BlockNumber) {
@@ -48,7 +48,7 @@ export async function processL2BlockProcessedLogs(
       log.args.blockNumber,
     );
 
-    retrievedBlockMetadata.push([header, archive, log.blockNumber!]);
+    retrievedBlockMetadata.push([header, archive]);
     expectedL2BlockNumber++;
   }
 
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
index e4ae4a7b161e..30c0b012508d 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/block_store.ts
@@ -1,14 +1,14 @@
 import { L2Block, TxEffect, TxHash, TxReceipt, TxStatus } from '@aztec/circuit-types';
 import { AppendOnlyTreeSnapshot, AztecAddress, Header, INITIAL_L2_BLOCK_NUM } from '@aztec/circuits.js';
 import { createDebugLogger } from '@aztec/foundation/log';
-import { AztecKVStore, AztecMap, Range } from '@aztec/kv-store';
+import { AztecKVStore, AztecMap, AztecSingleton, Range } from '@aztec/kv-store';
 
+import { DataRetrieval } from '../data_retrieval.js';
 import { BlockBodyStore } from './block_body_store.js';
 
 type BlockIndexValue = [blockNumber: number, index: number];
 
 type BlockStorage = {
-  l1BlockNumber: bigint;
   header: Buffer;
   archive: Buffer;
 };
@@ -19,6 +19,8 @@ type BlockStorage = {
 export class BlockStore {
   /** Map block number to block data */
   #blocks: AztecMap<number, BlockStorage>;
+  /** Stores L1 block number in which the last processed L2 block was included */
+  #lastSynchedL1Block: AztecSingleton<bigint>;
 
   /** Index mapping transaction hash (as a string) to its location in a block */
   #txIndex: AztecMap<string, BlockIndexValue>;
@@ -36,20 +38,20 @@ export class BlockStore {
     this.#blocks = db.openMap('archiver_blocks');
     this.#txIndex = db.openMap('archiver_tx_index');
     this.#contractIndex = db.openMap('archiver_contract_index');
+    this.#lastSynchedL1Block = db.openSingleton('archiver_last_synched_l1_block');
   }
 
   /**
    * Append new blocks to the store's list.
-   * @param blocks - The L2 blocks to be added to the store.
+   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
    */
-  addBlocks(blocks: L2Block[]): Promise<boolean> {
+  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
     return this.db.transaction(() => {
-      for (const block of blocks) {
+      for (const block of blocks.retrievedData) {
        void this.#blocks.set(block.number, {
           header: block.header.toBuffer(),
           archive: block.archive.toBuffer(),
-          l1BlockNumber: block.getL1BlockNumber(),
         });
 
         block.getTxs().forEach((tx, i) => {
@@ -57,6 +59,8 @@ export class BlockStore {
         });
       }
 
+      void this.#lastSynchedL1Block.set(blocks.lastProcessedL1BlockNumber);
+
       return true;
     });
   }
@@ -165,12 +169,7 @@ export class BlockStore {
    * @returns The L1 block that published the latest L2 block
    */
   getSynchedL1BlockNumber(): bigint {
-    const [lastBlock] = this.#blocks.values({ reverse: true, limit: 1 });
-    if (!lastBlock) {
-      return 0n;
-    } else {
-      return lastBlock.l1BlockNumber;
-    }
+    return this.#lastSynchedL1Block.get() ?? 0n;
   }
 
   #computeBlockRange(start: number, limit: number): Required<Pick<Range<number>, 'start' | 'end'>> {
diff --git a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
index 834737e5a9d7..a9bb202ce619 100644
--- a/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/kv_archiver_store/kv_archiver_store.ts
@@ -87,10 +87,10 @@ export class KVArchiverDataStore implements ArchiverDataStore {
 
   /**
    * Append new blocks to the store's list.
-   * @param blocks - The L2 blocks to be added to the store.
+   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
    * @returns True if the operation is successful.
   */
-  addBlocks(blocks: L2Block[]): Promise<boolean> {
+  addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
     return this.#blockStore.addBlocks(blocks);
   }
 
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
index 02da55b34152..38707f67190a 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.test.ts
@@ -17,13 +17,18 @@ describe('MemoryArchiverStore', () => {
   it('does not return more than "maxLogs" logs', async () => {
     const maxLogs = 5;
     archiverStore = new MemoryArchiverStore(maxLogs);
-    const blocks = Array(10)
-      .fill(0)
-      .map((_, index: number) => L2Block.random(index + 1, 4, 2, 3, 2, 2));
+    const blocks = {
+      lastProcessedL1BlockNumber: 3n,
+      retrievedData: Array(10)
+        .fill(0)
+        .map((_, index: number) => L2Block.random(index + 1, 4, 2, 3, 2, 2)),
+    };
 
     await archiverStore.addBlocks(blocks);
     await Promise.all(
-      blocks.map(block => archiverStore.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number)),
+      blocks.retrievedData.map(block =>
+        archiverStore.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
+      ),
     );
 
     const response = await archiverStore.getUnencryptedLogs({});
diff --git a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
index 657ea8679c08..4fab13918ad2 100644
--- a/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
+++ b/yarn-project/archiver/src/archiver/memory_archiver_store/memory_archiver_store.ts
@@ -62,6 +62,7 @@ export class MemoryArchiverStore implements ArchiverDataStore {
 
   private contractInstances: Map<string, ContractInstanceWithAddress> = new Map();
 
+  private lastL1BlockNewBlocks: bigint = 0n;
   private lastL1BlockNewMessages: bigint = 0n;
 
   constructor(
@@ -97,12 +98,13 @@ export class MemoryArchiverStore implements ArchiverDataStore {
 
   /**
    * Append new blocks to the store's list.
-   * @param blocks - The L2 blocks to be added to the store.
-   * @returns True if the operation is successful (always in this implementation).
+   * @param blocks - The L2 blocks to be added to the store and the last processed L1 block.
+   * @returns True if the operation is successful.
    */
-  public addBlocks(blocks: L2Block[]): Promise<boolean> {
-    this.l2BlockContexts.push(...blocks.map(block => new L2BlockContext(block)));
-    this.txEffects.push(...blocks.flatMap(b => b.getTxs()));
+  public addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean> {
+    this.lastL1BlockNewBlocks = blocks.lastProcessedL1BlockNumber;
+    this.l2BlockContexts.push(...blocks.retrievedData.map(block => new L2BlockContext(block)));
+    this.txEffects.push(...blocks.retrievedData.flatMap(b => b.getTxs()));
 
     return Promise.resolve(true);
   }
@@ -356,12 +358,9 @@ export class MemoryArchiverStore implements ArchiverDataStore {
   }
 
   public getSynchedL1BlockNumbers(): Promise<ArchiverL1SynchPoint> {
-    const blocks = this.l2BlockContexts[this.l2BlockContexts.length - 1]?.block?.getL1BlockNumber() ?? 0n;
-    const messages = this.lastL1BlockNewMessages;
-
     return Promise.resolve({
-      blocks,
-      messages,
+      blocks: this.lastL1BlockNewBlocks,
+      messages: this.lastL1BlockNewMessages,
     });
   }
 }
diff --git a/yarn-project/circuit-types/src/l2_block.ts b/yarn-project/circuit-types/src/l2_block.ts
index e992456a7a05..a9b1ffd1bce9 100644
--- a/yarn-project/circuit-types/src/l2_block.ts
+++ b/yarn-project/circuit-types/src/l2_block.ts
@@ -10,8 +10,6 @@ import { makeAppendOnlyTreeSnapshot, makeHeader } from './l2_block_code_to_purge.js';
  * The data that makes up the rollup proof, with encoder decoder functions.
  */
 export class L2Block {
-  #l1BlockNumber?: bigint;
-
   constructor(
     /** Snapshot of archive tree after the block is applied. */
     public archive: AppendOnlyTreeSnapshot,
@@ -19,30 +17,22 @@ export class L2Block {
     public header: Header,
     /** L2 block body. */
     public body: Body,
-    /** Associated L1 block num */
-    l1BlockNumber?: bigint,
-  ) {
-    this.#l1BlockNumber = l1BlockNumber;
-  }
+  ) {}
 
   /**
    * Constructs a new instance from named fields.
    * @param fields - Fields to pass to the constructor.
   * @param blockHash - Hash of the block.
-   * @param l1BlockNumber - The block number of the L1 block that contains this L2 block.
   * @returns A new instance.
   */
-  static fromFields(
-    fields: {
-      /** Snapshot of archive tree after the block is applied. */
-      archive: AppendOnlyTreeSnapshot;
-      /** L2 block header. */
-      header: Header;
-      body: Body;
-    },
-    l1BlockNumber?: bigint,
-  ) {
-    return new this(fields.archive, fields.header, fields.body, l1BlockNumber);
+  static fromFields(fields: {
+    /** Snapshot of archive tree after the block is applied. */
+    archive: AppendOnlyTreeSnapshot;
+    /** L2 block header.
*/ + header: Header; + body: Body; + }) { + return new this(fields.archive, fields.header, fields.body); } /** @@ -115,40 +105,17 @@ export class L2Block { const txsEffectsHash = body.getTxsEffectsHash(); - return L2Block.fromFields( - { - archive: makeAppendOnlyTreeSnapshot(1), - header: makeHeader(0, l2BlockNum, txsEffectsHash), - body, - }, - // just for testing purposes, each random L2 block got emitted in the equivalent L1 block - BigInt(l2BlockNum), - ); + return L2Block.fromFields({ + archive: makeAppendOnlyTreeSnapshot(1), + header: makeHeader(0, l2BlockNum, txsEffectsHash), + body, + }); } get number(): number { return Number(this.header.globalVariables.blockNumber.toBigInt()); } - /** - * Gets the L1 block number that included this block - */ - public getL1BlockNumber(): bigint { - if (typeof this.#l1BlockNumber === 'undefined') { - throw new Error('L1 block number has to be attached before calling "getL1BlockNumber"'); - } - - return this.#l1BlockNumber; - } - - /** - * Sets the L1 block number that included this block - * @param l1BlockNumber - The block number of the L1 block that contains this L2 block. - */ - public setL1BlockNumber(l1BlockNumber: bigint) { - this.#l1BlockNumber = l1BlockNumber; - } - /** * Returns the block's hash (hash of block header). * @returns The block's hash.