Skip to content

Commit

Permalink
refactor: nuking L1 block number from L2 block
Browse files Browse the repository at this point in the history
  • Loading branch information
benesjan committed Mar 18, 2024
1 parent a57663a commit fee8bd3
Show file tree
Hide file tree
Showing 10 changed files with 145 additions and 159 deletions.
75 changes: 38 additions & 37 deletions yarn-project/archiver/src/archiver/archiver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ import { Chain, HttpTransport, PublicClient, createPublicClient, http } from 'vi
import { ArchiverDataStore } from './archiver_store.js';
import { ArchiverConfig } from './config.js';
import {
DataRetrieval,
retrieveBlockBodiesFromAvailabilityOracle,
retrieveBlockMetadataFromRollup,
retrieveL1ToL2Messages,
Expand Down Expand Up @@ -214,50 +215,50 @@ export class Archiver implements ArchiveSource {
);

const blockBodies = retrievedBlockBodies.retrievedData.map(([blockBody]) => blockBody);

await this.store.addBlockBodies(blockBodies);

const retrievedBlockMetadata = await retrieveBlockMetadataFromRollup(
this.publicClient,
this.rollupAddress,
blockUntilSynced,
lastL1Blocks.blocks + 1n,
currentL1BlockNumber,
nextExpectedL2BlockNum,
);

const retrievedBodyHashes = retrievedBlockMetadata.retrievedData.map(
([header]) => header.contentCommitment.txsEffectsHash,
);
// Now that we have the block bodies, we retrieve the block metadata and build L2 blocks from the bodies
// and the metadata.
let retrievedBlocks: DataRetrieval<L2Block>;
{
const retrievedBlockMetadata = await retrieveBlockMetadataFromRollup(
this.publicClient,
this.rollupAddress,
blockUntilSynced,
lastL1Blocks.blocks + 1n,
currentL1BlockNumber,
nextExpectedL2BlockNum,
);

const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);
const retrievedBodyHashes = retrievedBlockMetadata.retrievedData.map(
([header]) => header.contentCommitment.txsEffectsHash,
);

if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
throw new Error('Block headers length does not equal block bodies length');
}
const blockBodiesFromStore = await this.store.getBlockBodies(retrievedBodyHashes);

const retrievedBlocks = {
retrievedData: retrievedBlockMetadata.retrievedData.map(
(blockMetadata, i) =>
new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i], blockMetadata[2]),
),
};
if (retrievedBlockMetadata.retrievedData.length !== blockBodiesFromStore.length) {
throw new Error('Block headers length does not equal block bodies length');
}

if (retrievedBlocks.retrievedData.length === 0) {
return;
} else {
this.log(
`Retrieved ${retrievedBlocks.retrievedData.length} new L2 blocks between L1 blocks ${
lastL1Blocks.blocks + 1n
} and ${currentL1BlockNumber}.`,
const blocks = retrievedBlockMetadata.retrievedData.map(
(blockMetadata, i) => new L2Block(blockMetadata[1], blockMetadata[0], blockBodiesFromStore[i]),
);
}

// create the block number -> block hash mapping to ensure we retrieve the appropriate events
const blockNumberToBodyHash: { [key: number]: Buffer | undefined } = {};
retrievedBlocks.retrievedData.forEach((block: L2Block) => {
blockNumberToBodyHash[block.number] = block.header.contentCommitment.txsEffectsHash;
});
if (blocks.length === 0) {
return;
} else {
this.log(
`Retrieved ${blocks.length} new L2 blocks between L1 blocks ${
lastL1Blocks.blocks + 1n
} and ${currentL1BlockNumber}.`,
);
}

retrievedBlocks = {
lastProcessedL1BlockNumber: retrievedBlockMetadata.lastProcessedL1BlockNumber,
retrievedData: blocks,
};
}

await Promise.all(
retrievedBlocks.retrievedData.map(block => {
Expand All @@ -280,7 +281,7 @@ export class Archiver implements ArchiveSource {
}),
);

await this.store.addBlocks(retrievedBlocks.retrievedData);
await this.store.addBlocks(retrievedBlocks);
}

/**
Expand Down
4 changes: 2 additions & 2 deletions yarn-project/archiver/src/archiver/archiver_store.ts
Original file line number Diff line number Diff line change
Expand Up @@ -31,10 +31,10 @@ export type ArchiverL1SynchPoint = {
export interface ArchiverDataStore {
/**
* Append new blocks to the store's list.
* @param blocks - The L2 blocks to be added to the store.
* @param blocks - The L2 blocks to be added to the store, along with the last processed L1 block number.
* @returns True if the operation is successful.
*/
addBlocks(blocks: L2Block[]): Promise<boolean>;
addBlocks(blocks: DataRetrieval<L2Block>): Promise<boolean>;

/**
* Append new block bodies to the store's list.
Expand Down
91 changes: 53 additions & 38 deletions yarn-project/archiver/src/archiver/archiver_store_test_suite.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import { randomBytes, randomInt } from '@aztec/foundation/crypto';
import { ContractClassPublic, ContractInstanceWithAddress, SerializableContractInstance } from '@aztec/types/contracts';

import { ArchiverDataStore } from './archiver_store.js';
import { DataRetrieval } from './data_retrieval.js';

/**
* @param testName - The name of the test suite.
Expand All @@ -14,27 +15,26 @@ import { ArchiverDataStore } from './archiver_store.js';
export function describeArchiverDataStore(testName: string, getStore: () => ArchiverDataStore) {
describe(testName, () => {
let store: ArchiverDataStore;
let blocks: L2Block[];
let blocks: DataRetrieval<L2Block>;
const blockTests: [number, number, () => L2Block[]][] = [
[1, 1, () => blocks.slice(0, 1)],
[10, 1, () => blocks.slice(9, 10)],
[1, 10, () => blocks.slice(0, 10)],
[2, 5, () => blocks.slice(1, 6)],
[5, 2, () => blocks.slice(4, 6)],
[1, 1, () => blocks.retrievedData.slice(0, 1)],
[10, 1, () => blocks.retrievedData.slice(9, 10)],
[1, 10, () => blocks.retrievedData.slice(0, 10)],
[2, 5, () => blocks.retrievedData.slice(1, 6)],
[5, 2, () => blocks.retrievedData.slice(4, 6)],
];

beforeEach(() => {
store = getStore();
blocks = Array.from({ length: 10 }).map((_, i) => {
const block = L2Block.random(i + 1);
block.setL1BlockNumber(BigInt(i + 1));
return block;
});
blocks = {
lastProcessedL1BlockNumber: 5n,
retrievedData: Array.from({ length: 10 }).map((_, i) => L2Block.random(i + 1)),
};
});

describe('addBlocks', () => {
it('returns success when adding block bodies', async () => {
await expect(store.addBlockBodies(blocks.map(block => block.body))).resolves.toBe(true);
await expect(store.addBlockBodies(blocks.retrievedData.map(block => block.body))).resolves.toBe(true);
});

it('returns success when adding blocks', async () => {
Expand All @@ -50,7 +50,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
describe('getBlocks', () => {
beforeEach(async () => {
await store.addBlocks(blocks);
await store.addBlockBodies(blocks.map(block => block.body));
await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
});

it.each(blockTests)('retrieves previously stored blocks', async (start, limit, getExpectedBlocks) => {
Expand All @@ -66,7 +66,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
});

it('resets `from` to the first block if it is out of range', async () => {
await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.slice(0, 1));
await expect(store.getBlocks(INITIAL_L2_BLOCK_NUM - 100, 1)).resolves.toEqual(blocks.retrievedData.slice(0, 1));
});
});

Expand All @@ -77,7 +77,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch

it("returns the most recently added block's number", async () => {
await store.addBlocks(blocks);
await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.at(-1)!.number);
await expect(store.getSynchedL2BlockNumber()).resolves.toEqual(blocks.retrievedData.at(-1)!.number);
});
});

Expand All @@ -92,7 +92,7 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
it('returns the L1 block number in which the most recent L2 block was published', async () => {
await store.addBlocks(blocks);
await expect(store.getSynchedL1BlockNumbers()).resolves.toEqual({
blocks: blocks.at(-1)!.getL1BlockNumber(),
blocks: blocks.lastProcessedL1BlockNumber,
messages: 0n,
});
});
Expand All @@ -109,7 +109,11 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
describe('addLogs', () => {
it('adds encrypted & unencrypted logs', async () => {
await expect(
store.addLogs(blocks[0].body.encryptedLogs, blocks[0].body.unencryptedLogs, blocks[0].number),
store.addLogs(
blocks.retrievedData[0].body.encryptedLogs,
blocks.retrievedData[0].body.unencryptedLogs,
blocks.retrievedData[0].number,
),
).resolves.toEqual(true);
});
});
Expand All @@ -120,7 +124,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
])('getLogs (%s)', (_, logType) => {
beforeEach(async () => {
await Promise.all(
blocks.map(block => store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number)),
blocks.retrievedData.map(block =>
store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
),
);
});

Expand All @@ -136,18 +142,20 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
describe('getTxEffect', () => {
beforeEach(async () => {
await Promise.all(
blocks.map(block => store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number)),
blocks.retrievedData.map(block =>
store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
),
);
await store.addBlocks(blocks);
await store.addBlockBodies(blocks.map(block => block.body));
await store.addBlockBodies(blocks.retrievedData.map(block => block.body));
});

it.each([
() => blocks[0].getTx(0),
() => blocks[9].getTx(3),
() => blocks[3].getTx(1),
() => blocks[5].getTx(2),
() => blocks[1].getTx(0),
() => blocks.retrievedData[0].getTx(0),
() => blocks.retrievedData[9].getTx(3),
() => blocks.retrievedData[3].getTx(1),
() => blocks.retrievedData[5].getTx(2),
() => blocks.retrievedData[1].getTx(0),
])('retrieves a previously stored transaction', async getExpectedTx => {
const expectedTx = getExpectedTx();
const actualTx = await store.getTxEffect(expectedTx.txHash);
Expand Down Expand Up @@ -238,28 +246,33 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
const numPublicFunctionCalls = 3;
const numUnencryptedLogs = 4;
const numBlocks = 10;
let blocks: L2Block[];
let blocks: DataRetrieval<L2Block>;

beforeEach(async () => {
blocks = Array(numBlocks)
.fill(0)
.map((_, index: number) =>
L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
);
blocks = {
lastProcessedL1BlockNumber: 4n,
retrievedData: Array(numBlocks)
.fill(0)
.map((_, index: number) =>
L2Block.random(index + 1, txsPerBlock, 2, numPublicFunctionCalls, 2, numUnencryptedLogs),
),
};

await store.addBlocks(blocks);
await store.addBlockBodies(blocks.map(block => block.body));
await store.addBlockBodies(blocks.retrievedData.map(block => block.body));

await Promise.all(
blocks.map(block => store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number)),
blocks.retrievedData.map(block =>
store.addLogs(block.body.encryptedLogs, block.body.unencryptedLogs, block.number),
),
);
});

it('"txHash" filter param is respected', async () => {
// get random tx
const targetBlockIndex = randomInt(numBlocks);
const targetTxIndex = randomInt(txsPerBlock);
const targetTxHash = new L2BlockContext(blocks[targetBlockIndex]).getTxHash(targetTxIndex);
const targetTxHash = new L2BlockContext(blocks.retrievedData[targetBlockIndex]).getTxHash(targetTxIndex);

const response = await store.getUnencryptedLogs({ txHash: targetTxHash });
const logs = response.logs;
Expand Down Expand Up @@ -303,8 +316,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
const targetLogIndex = randomInt(numUnencryptedLogs);
const targetContractAddress = UnencryptedL2Log.fromBuffer(
blocks[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[targetFunctionLogIndex]
.logs[targetLogIndex],
blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
targetFunctionLogIndex
].logs[targetLogIndex],
).contractAddress;

const response = await store.getUnencryptedLogs({ contractAddress: targetContractAddress });
Expand All @@ -323,8 +337,9 @@ export function describeArchiverDataStore(testName: string, getStore: () => Arch
const targetFunctionLogIndex = randomInt(numPublicFunctionCalls);
const targetLogIndex = randomInt(numUnencryptedLogs);
const targetSelector = UnencryptedL2Log.fromBuffer(
blocks[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[targetFunctionLogIndex]
.logs[targetLogIndex],
blocks.retrievedData[targetBlockIndex].body.txEffects[targetTxIndex].unencryptedLogs.functionLogs[
targetFunctionLogIndex
].logs[targetLogIndex],
).selector;

const response = await store.getUnencryptedLogs({ selector: targetSelector });
Expand Down
6 changes: 3 additions & 3 deletions yarn-project/archiver/src/archiver/data_retrieval.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ type DataRetrieval<T> = {
* @param searchStartBlock - The block number to use for starting the search.
* @param searchEndBlock - The highest block number that we should search up to.
* @param expectedNextL2BlockNum - The next L2 block number that we expect to find.
* @returns An array of tuples representing block metadata including the header, archive tree snapshot, and associated l1 block number; as well as the next eth block to search from.
* @returns An array of tuples representing block metadata, each including the header and archive tree snapshot; as well as the next eth block to search from.
*/
export async function retrieveBlockMetadataFromRollup(
publicClient: PublicClient,
Expand All @@ -44,8 +44,8 @@ export async function retrieveBlockMetadataFromRollup(
searchStartBlock: bigint,
searchEndBlock: bigint,
expectedNextL2BlockNum: bigint,
): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot, bigint]>> {
const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, bigint][] = [];
): Promise<DataRetrieval<[Header, AppendOnlyTreeSnapshot]>> {
const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
do {
if (searchStartBlock > searchEndBlock) {
break;
Expand Down
8 changes: 4 additions & 4 deletions yarn-project/archiver/src/archiver/eth_log_handlers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,14 +28,14 @@ export function processLeafInsertedLogs(
* @param publicClient - The viem public client to use for transaction retrieval.
* @param expectedL2BlockNumber - The next expected L2 block number.
* @param logs - L2BlockProcessed logs.
* @returns - An array of tuples representing block metadata including the header, archive tree snapshot, and associated l1 block number.
* @returns - An array of tuples representing block metadata, each including the header and archive tree snapshot.
*/
export async function processL2BlockProcessedLogs(
publicClient: PublicClient,
expectedL2BlockNumber: bigint,
logs: Log<bigint, number, false, undefined, true, typeof RollupAbi, 'L2BlockProcessed'>[],
): Promise<[Header, AppendOnlyTreeSnapshot, bigint][]> {
const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot, bigint][] = [];
): Promise<[Header, AppendOnlyTreeSnapshot][]> {
const retrievedBlockMetadata: [Header, AppendOnlyTreeSnapshot][] = [];
for (const log of logs) {
const blockNum = log.args.blockNumber;
if (blockNum !== expectedL2BlockNumber) {
Expand All @@ -48,7 +48,7 @@ export async function processL2BlockProcessedLogs(
log.args.blockNumber,
);

retrievedBlockMetadata.push([header, archive, log.blockNumber!]);
retrievedBlockMetadata.push([header, archive]);
expectedL2BlockNumber++;
}

Expand Down
Loading

0 comments on commit fee8bd3

Please sign in to comment.