From d65926cb869054b27ede443adfd512b1cc43de4a Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Thu, 19 Sep 2024 18:59:54 -0300 Subject: [PATCH 1/2] feat: Light block builder Adds a block builder that assembles an L2Block out of a set of processed txs without relying on base, merge, block root, or parity circuits, and works using only ts code. Keeps the same interface as the current block builder for drop-in replacement within the orchestrator. --- yarn-project/circuit-types/package.json | 1 + yarn-project/circuit-types/src/body.ts | 47 +-- .../circuit-types/src/merkle_tree_id.ts | 19 ++ .../circuit-types/src/test/factories.ts | 63 ++++ yarn-project/circuit-types/src/test/index.ts | 1 + yarn-project/circuit-types/src/tx_effect.ts | 53 +-- .../circuits.js/src/merkle/merkle_tree.ts | 4 +- .../circuits.js/src/structs/gas_settings.ts | 5 +- .../circuits.js/src/tests/factories.ts | 33 +- .../foundation/src/serialize/serialize.ts | 2 +- yarn-project/foundation/src/trees/index.ts | 2 + .../src/trees/unbalanced_merkle_root.ts | 52 +++ yarn-project/prover-client/package.json | 5 +- .../prover-client/src/mocks/fixtures.ts | 50 +-- .../prover-client/src/mocks/test_context.ts | 2 +- .../orchestrator/block-building-helpers.ts | 64 +++- .../src/orchestrator/orchestrator.ts | 34 +- .../src/block_builder/index.ts | 54 +--- .../src/block_builder/light.test.ts | 303 ++++++++++++++++++ .../src/block_builder/light.ts | 164 ++++++++++ .../src/block_builder/orchestrator.ts | 51 +++ .../src/client/sequencer-client.ts | 4 +- .../src/world-state-db/merkle_trees.ts | 10 + 23 files changed, 791 insertions(+), 232 deletions(-) create mode 100644 yarn-project/circuit-types/src/test/factories.ts create mode 100644 yarn-project/circuit-types/src/test/index.ts create mode 100644 yarn-project/foundation/src/trees/unbalanced_merkle_root.ts create mode 100644 yarn-project/sequencer-client/src/block_builder/light.test.ts create mode 100644 yarn-project/sequencer-client/src/block_builder/light.ts create mode 100644 yarn-project/sequencer-client/src/block_builder/orchestrator.ts diff --git a/yarn-project/circuit-types/package.json b/yarn-project/circuit-types/package.json index d72351c6ecb..6e874e8b129 100644 --- a/yarn-project/circuit-types/package.json +++ b/yarn-project/circuit-types/package.json @@ -8,6 +8,7 @@ "./jest": "./dest/jest/index.js", "./interfaces": "./dest/interfaces/index.js", "./log_id": "./dest/logs/log_id.js", + "./test": "./dest/test/index.js", "./tx_hash": "./dest/tx/tx_hash.js" }, "typedocOptions": { diff --git a/yarn-project/circuit-types/src/body.ts b/yarn-project/circuit-types/src/body.ts index 21eba85c30e..dcc41c060b6 100644 --- a/yarn-project/circuit-types/src/body.ts +++ b/yarn-project/circuit-types/src/body.ts @@ -4,9 +4,8 @@ import { TxEffect, UnencryptedL2BlockL2Logs, } from '@aztec/circuit-types'; -import { padArrayEnd } from '@aztec/foundation/collection'; -import { sha256Trunc } from '@aztec/foundation/crypto'; import { BufferReader, serializeToBuffer } from '@aztec/foundation/serialize'; +import { computeUnbalancedMerkleRoot } from '@aztec/foundation/trees'; import { inspect } from 'util'; @@ -52,49 +51,9 @@ export class Body { * @returns The txs effects hash. 
*/ getTxsEffectsHash() { - // Adapted from proving-state.ts -> findMergeLevel and unbalanced_tree.ts - // Calculates the tree upwards layer by layer until we reach the root - // The L1 calculation instead computes the tree from right to left (slightly cheaper gas) - // TODO: A more thorough investigation of which method is cheaper, then use that method everywhere - const computeRoot = (leaves: Buffer[]): Buffer => { - const depth = Math.ceil(Math.log2(leaves.length)); - let [layerWidth, nodeToShift] = - leaves.length & 1 ? [leaves.length - 1, leaves[leaves.length - 1]] : [leaves.length, Buffer.alloc(0)]; - // Allocate this layer's leaves and init the next layer up - let thisLayer = leaves.slice(0, layerWidth); - let nextLayer = []; - for (let i = 0; i < depth; i++) { - for (let j = 0; j < layerWidth; j += 2) { - // Store the hash of each pair one layer up - nextLayer[j / 2] = sha256Trunc(Buffer.concat([thisLayer[j], thisLayer[j + 1]])); - } - layerWidth /= 2; - if (layerWidth & 1) { - if (nodeToShift.length) { - // If the next layer has odd length, and we have a node that needs to be shifted up, add it here - nextLayer.push(nodeToShift); - layerWidth += 1; - nodeToShift = Buffer.alloc(0); - } else { - // If we don't have a node waiting to be shifted, store the next layer's final node to be shifted - layerWidth -= 1; - nodeToShift = nextLayer[layerWidth]; - } - } - // reset the layers - thisLayer = nextLayer; - nextLayer = []; - } - // return the root - return thisLayer[0]; - }; - const emptyTxEffectHash = TxEffect.empty().hash(); - let leaves: Buffer[] = this.txEffects.map(txEffect => txEffect.hash()); - if (leaves.length < 2) { - leaves = padArrayEnd(leaves, emptyTxEffectHash, 2); - } - return computeRoot(leaves); + const leaves: Buffer[] = this.txEffects.map(txEffect => txEffect.hash()); + return computeUnbalancedMerkleRoot(leaves, emptyTxEffectHash); } get noteEncryptedLogs(): EncryptedNoteL2BlockL2Logs { diff --git a/yarn-project/circuit-types/src/merkle_tree_id.ts b/yarn-project/circuit-types/src/merkle_tree_id.ts index 25ab0897284..785d00ad2a1 100644 --- a/yarn-project/circuit-types/src/merkle_tree_id.ts +++ b/yarn-project/circuit-types/src/merkle_tree_id.ts @@ -1,8 +1,13 @@ import { + ARCHIVE_HEIGHT, ARCHIVE_TREE_ID, L1_TO_L2_MESSAGE_TREE_ID, + L1_TO_L2_MSG_TREE_HEIGHT, + NOTE_HASH_TREE_HEIGHT, NOTE_HASH_TREE_ID, + NULLIFIER_TREE_HEIGHT, NULLIFIER_TREE_ID, + PUBLIC_DATA_TREE_HEIGHT, PUBLIC_DATA_TREE_ID, } from '@aztec/circuits.js'; @@ -21,3 +26,17 @@ export enum MerkleTreeId { export const merkleTreeIds = () => { return Object.values(MerkleTreeId).filter((v): v is MerkleTreeId => !isNaN(Number(v))); }; + +const TREE_HEIGHTS = { + [MerkleTreeId.NOTE_HASH_TREE]: NOTE_HASH_TREE_HEIGHT, + [MerkleTreeId.ARCHIVE]: ARCHIVE_HEIGHT, + [MerkleTreeId.L1_TO_L2_MESSAGE_TREE]: L1_TO_L2_MSG_TREE_HEIGHT, + [MerkleTreeId.NULLIFIER_TREE]: NULLIFIER_TREE_HEIGHT, + [MerkleTreeId.PUBLIC_DATA_TREE]: PUBLIC_DATA_TREE_HEIGHT, +} as const; + +export type TreeHeights = typeof TREE_HEIGHTS; + +export function getTreeHeight(treeId: TID): TreeHeights[TID] { + return TREE_HEIGHTS[treeId]; +} diff --git a/yarn-project/circuit-types/src/test/factories.ts b/yarn-project/circuit-types/src/test/factories.ts new file mode 100644 index 00000000000..51df5cd43c5 --- /dev/null +++ b/yarn-project/circuit-types/src/test/factories.ts @@ -0,0 +1,63 @@ +import { type MerkleTreeOperations, makeProcessedTx, mockTx } from '@aztec/circuit-types'; +import { + Fr, + GasSettings, + type Header, + KernelCircuitPublicInputs, + LogHash, 
+ MAX_L2_TO_L1_MSGS_PER_TX, + MAX_NOTE_HASHES_PER_TX, + MAX_NULLIFIERS_PER_TX, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + PublicDataUpdateRequest, + ScopedLogHash, +} from '@aztec/circuits.js'; +import { makeScopedL2ToL1Message } from '@aztec/circuits.js/testing'; +import { makeTuple } from '@aztec/foundation/array'; + +/** Makes a bloated processed tx for testing purposes. */ +export function makeBloatedProcessedTx( + historicalHeaderOrDb: Header | MerkleTreeOperations, + vkRoot: Fr, + seed = 0x1, + overrides: { inclusionFee?: Fr } = {}, +) { + seed *= MAX_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds + const tx = mockTx(seed); + const kernelOutput = KernelCircuitPublicInputs.empty(); + kernelOutput.constants.vkTreeRoot = vkRoot; + kernelOutput.constants.historicalHeader = + 'getInitialHeader' in historicalHeaderOrDb ? historicalHeaderOrDb.getInitialHeader() : historicalHeaderOrDb; + kernelOutput.end.publicDataUpdateRequests = makeTuple( + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + i => new PublicDataUpdateRequest(new Fr(i), new Fr(i + 10), i + 20), + seed + 0x500, + ); + kernelOutput.end.publicDataUpdateRequests = makeTuple( + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + i => new PublicDataUpdateRequest(new Fr(i), new Fr(i + 10), i + 20), + seed + 0x600, + ); + + kernelOutput.constants.txContext.gasSettings = GasSettings.default({ inclusionFee: overrides.inclusionFee }); + + const processedTx = makeProcessedTx(tx, kernelOutput, []); + + processedTx.data.end.noteHashes = makeTuple(MAX_NOTE_HASHES_PER_TX, i => new Fr(i), seed + 0x100); + processedTx.data.end.nullifiers = makeTuple(MAX_NULLIFIERS_PER_TX, i => new Fr(i), seed + 0x100000); + + processedTx.data.end.nullifiers[tx.data.forPublic!.end.nullifiers.length - 1] = Fr.zero(); + + processedTx.data.end.l2ToL1Msgs = makeTuple(MAX_L2_TO_L1_MSGS_PER_TX, makeScopedL2ToL1Message, seed + 0x300); + processedTx.noteEncryptedLogs.unrollLogs().forEach((log, i) => { + processedTx.data.end.noteEncryptedLogsHashes[i] = new LogHash(Fr.fromBuffer(log.hash()), 0, new Fr(log.length)); + }); + processedTx.encryptedLogs.unrollLogs().forEach((log, i) => { + processedTx.data.end.encryptedLogsHashes[i] = new ScopedLogHash( + new LogHash(Fr.fromBuffer(log.hash()), 0, new Fr(log.length)), + log.maskedContractAddress, + ); + }); + + return processedTx; +} diff --git a/yarn-project/circuit-types/src/test/index.ts b/yarn-project/circuit-types/src/test/index.ts new file mode 100644 index 00000000000..e24620a0a23 --- /dev/null +++ b/yarn-project/circuit-types/src/test/index.ts @@ -0,0 +1 @@ +export * from './factories.js'; diff --git a/yarn-project/circuit-types/src/tx_effect.ts b/yarn-project/circuit-types/src/tx_effect.ts index 4ee1cf633ca..1115f1b2911 100644 --- a/yarn-project/circuit-types/src/tx_effect.ts +++ b/yarn-project/circuit-types/src/tx_effect.ts @@ -146,33 +146,10 @@ export class TxEffect { */ hash() { const padBuffer = (buf: Buffer, length: number) => Buffer.concat([buf, Buffer.alloc(length - buf.length)]); - // Below follows computeTxOutHash in TxsDecoder.sol and new_sha in variable_merkle_tree.nr - // TODO(#7218): Revert to fixed height tree for outbox - const computeTxOutHash = (l2ToL1Msgs: Fr[]) => { - if (l2ToL1Msgs.length == 0) { - return Buffer.alloc(32); - } - const depth = l2ToL1Msgs.length == 1 ? 
1 : Math.ceil(Math.log2(l2ToL1Msgs.length)); - let thisLayer = padArrayEnd( - l2ToL1Msgs.map(msg => msg.toBuffer()), - Buffer.alloc(32), - 2 ** depth, - ); - let nextLayer = []; - for (let i = 0; i < depth; i++) { - for (let j = 0; j < thisLayer.length; j += 2) { - // Store the hash of each pair one layer up - nextLayer[j / 2] = sha256Trunc(Buffer.concat([thisLayer[j], thisLayer[j + 1]])); - } - thisLayer = nextLayer; - nextLayer = []; - } - return thisLayer[0]; - }; const noteHashesBuffer = padBuffer(serializeToBuffer(this.noteHashes), Fr.SIZE_IN_BYTES * MAX_NOTE_HASHES_PER_TX); const nullifiersBuffer = padBuffer(serializeToBuffer(this.nullifiers), Fr.SIZE_IN_BYTES * MAX_NULLIFIERS_PER_TX); - const outHashBuffer = computeTxOutHash(this.l2ToL1Msgs); + const outHashBuffer = this.txOutHash(); const publicDataWritesBuffer = padBuffer( serializeToBuffer(this.publicDataWrites), PublicDataWrite.SIZE_IN_BYTES * MAX_TOTAL_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, @@ -200,6 +177,34 @@ export class TxEffect { return sha256Trunc(inputValue); } + /** + * Computes txOutHash of this tx effect. + * TODO(#7218): Revert to fixed height tree for outbox + * @dev Follows computeTxOutHash in TxsDecoder.sol and new_sha in variable_merkle_tree.nr + */ + txOutHash() { + const { l2ToL1Msgs } = this; + if (l2ToL1Msgs.length == 0) { + return Buffer.alloc(32); + } + const depth = l2ToL1Msgs.length == 1 ? 1 : Math.ceil(Math.log2(l2ToL1Msgs.length)); + let thisLayer = padArrayEnd( + l2ToL1Msgs.map(msg => msg.toBuffer()), + Buffer.alloc(32), + 2 ** depth, + ); + let nextLayer = []; + for (let i = 0; i < depth; i++) { + for (let j = 0; j < thisLayer.length; j += 2) { + // Store the hash of each pair one layer up + nextLayer[j / 2] = sha256Trunc(Buffer.concat([thisLayer[j], thisLayer[j + 1]])); + } + thisLayer = nextLayer; + nextLayer = []; + } + return thisLayer[0]; + } + static random( numPrivateCallsPerTx = 2, numPublicCallsPerTx = 3, diff --git a/yarn-project/circuits.js/src/merkle/merkle_tree.ts b/yarn-project/circuits.js/src/merkle/merkle_tree.ts index 53d516960d5..aa7ea221337 100644 --- a/yarn-project/circuits.js/src/merkle/merkle_tree.ts +++ b/yarn-project/circuits.js/src/merkle/merkle_tree.ts @@ -47,7 +47,7 @@ export class MerkleTree { } /** Returns a nice string representation of the tree, useful for debugging purposes. */ - public drawTree() { + public drawTree(elemSize = 8) { const levels: string[][] = []; const tree = this.nodes; const maxRowSize = Math.ceil(tree.length / 2); @@ -58,7 +58,7 @@ export class MerkleTree { levels.push( tree .slice(rowOffset, rowOffset + rowSize) - .map(n => n.toString('hex').slice(0, 8) + ' '.repeat((paddingSize - 1) * 9)), + .map(n => n.toString('hex').slice(0, elemSize) + ' '.repeat((paddingSize - 1) * (elemSize + 1))), ); rowOffset += rowSize; paddingSize <<= 1; diff --git a/yarn-project/circuits.js/src/structs/gas_settings.ts b/yarn-project/circuits.js/src/structs/gas_settings.ts index 7ef113055e9..8c2b6860efc 100644 --- a/yarn-project/circuits.js/src/structs/gas_settings.ts +++ b/yarn-project/circuits.js/src/structs/gas_settings.ts @@ -1,3 +1,4 @@ +import { compact } from '@aztec/foundation/collection'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, FieldReader, serializeToBuffer, serializeToFields } from '@aztec/foundation/serialize'; import { type FieldsOf } from '@aztec/foundation/types'; @@ -66,13 +67,13 @@ export class GasSettings { } /** Default gas settings to use when user has not provided them. 
*/ - static default(overrides?: Partial>) { + static default(overrides: Partial> = {}) { return GasSettings.from({ gasLimits: { l2Gas: DEFAULT_GAS_LIMIT, daGas: DEFAULT_GAS_LIMIT }, teardownGasLimits: { l2Gas: DEFAULT_TEARDOWN_GAS_LIMIT, daGas: DEFAULT_TEARDOWN_GAS_LIMIT }, maxFeesPerGas: { feePerL2Gas: new Fr(DEFAULT_MAX_FEE_PER_GAS), feePerDaGas: new Fr(DEFAULT_MAX_FEE_PER_GAS) }, inclusionFee: new Fr(DEFAULT_INCLUSION_FEE), - ...overrides, + ...compact(overrides), }); } diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 90dbbec53de..8c45dbad247 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -1,6 +1,7 @@ import { type FieldsOf, makeHalfFullTuple, makeTuple } from '@aztec/foundation/array'; import { AztecAddress } from '@aztec/foundation/aztec-address'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; +import { compact } from '@aztec/foundation/collection'; import { EthAddress } from '@aztec/foundation/eth-address'; import { type Bufferable } from '@aztec/foundation/serialize'; import { @@ -789,21 +790,18 @@ export function makePrivateCircuitPublicInputs(seed = 0): PrivateCircuitPublicIn }); } -export function makeGlobalVariables( - seed = 1, - blockNumber: number | undefined = undefined, - slotNumber: number | undefined = undefined, -): GlobalVariables { - return new GlobalVariables( - new Fr(seed), - new Fr(seed + 1), - new Fr(blockNumber ?? seed + 2), - new Fr(slotNumber ?? seed + 3), - new Fr(seed + 4), - EthAddress.fromField(new Fr(seed + 5)), - AztecAddress.fromField(new Fr(seed + 6)), - new GasFees(new Fr(seed + 7), new Fr(seed + 8)), - ); +export function makeGlobalVariables(seed = 1, overrides: Partial> = {}): GlobalVariables { + return GlobalVariables.from({ + chainId: new Fr(seed), + version: new Fr(seed + 1), + blockNumber: new Fr(seed + 2), + slotNumber: new Fr(seed + 3), + timestamp: new Fr(seed + 4), + coinbase: EthAddress.fromField(new Fr(seed + 5)), + feeRecipient: AztecAddress.fromField(new Fr(seed + 6)), + gasFees: new GasFees(new Fr(seed + 7), new Fr(seed + 8)), + ...compact(overrides), + }); } export function makeGasFees(seed = 1) { @@ -1076,7 +1074,10 @@ export function makeHeader( makeAppendOnlyTreeSnapshot(seed + 0x100), makeContentCommitment(seed + 0x200, txsEffectsHash), makeStateReference(seed + 0x600), - makeGlobalVariables((seed += 0x700), blockNumber, slotNumber), + makeGlobalVariables((seed += 0x700), { + ...(blockNumber ? { blockNumber: new Fr(blockNumber) } : {}), + ...(slotNumber ? 
{ slotNumber: new Fr(slotNumber) } : {}), + }), fr(seed + 0x800), ); } diff --git a/yarn-project/foundation/src/serialize/serialize.ts b/yarn-project/foundation/src/serialize/serialize.ts index ae305070714..6698a7081e2 100644 --- a/yarn-project/foundation/src/serialize/serialize.ts +++ b/yarn-project/foundation/src/serialize/serialize.ts @@ -242,7 +242,7 @@ export function toFriendlyJSON(obj: object): string { ).toFriendlyJSON ) { return value.toFriendlyJSON(); - } else if (value && value.type && ['Fr', 'Fq', 'AztecAddress'].includes(value.type)) { + } else if (value && value.type && ['Fr', 'Fq', 'AztecAddress', 'EthAddress'].includes(value.type)) { return value.value; } else { return value; diff --git a/yarn-project/foundation/src/trees/index.ts b/yarn-project/foundation/src/trees/index.ts index 60b025a9e13..ccaf59eb8fe 100644 --- a/yarn-project/foundation/src/trees/index.ts +++ b/yarn-project/foundation/src/trees/index.ts @@ -1,3 +1,5 @@ +export * from './unbalanced_merkle_root.js'; + /** * A leaf of an indexed merkle tree. */ diff --git a/yarn-project/foundation/src/trees/unbalanced_merkle_root.ts b/yarn-project/foundation/src/trees/unbalanced_merkle_root.ts new file mode 100644 index 00000000000..2df6765afc7 --- /dev/null +++ b/yarn-project/foundation/src/trees/unbalanced_merkle_root.ts @@ -0,0 +1,52 @@ +import { padArrayEnd } from '@aztec/foundation/collection'; +import { sha256Trunc } from '@aztec/foundation/crypto'; + +/** + * Computes the merkle root for an unbalanced tree. + * + * @dev Adapted from proving-state.ts -> findMergeLevel and unbalanced_tree.ts. + * Calculates the tree upwards layer by layer until we reach the root. + * The L1 calculation instead computes the tree from right to left (slightly cheaper gas). + * TODO: A more thorough investigation of which method is cheaper, then use that method everywhere. + */ +export function computeUnbalancedMerkleRoot(leaves: Buffer[], emptyLeaf?: Buffer, hasher = sha256Trunc): Buffer { + // Pad leaves to 2 + if (leaves.length < 2) { + if (emptyLeaf === undefined) { + throw new Error('Cannot compute a Merkle root with less than 2 leaves'); + } else { + leaves = padArrayEnd(leaves, emptyLeaf, 2); + } + } + + const depth = Math.ceil(Math.log2(leaves.length)); + let [layerWidth, nodeToShift] = + leaves.length & 1 ? 
[leaves.length - 1, leaves[leaves.length - 1]] : [leaves.length, Buffer.alloc(0)]; + // Allocate this layer's leaves and init the next layer up + let thisLayer = leaves.slice(0, layerWidth); + let nextLayer = []; + for (let i = 0; i < depth; i++) { + for (let j = 0; j < layerWidth; j += 2) { + // Store the hash of each pair one layer up + nextLayer[j / 2] = hasher(Buffer.concat([thisLayer[j], thisLayer[j + 1]])); + } + layerWidth /= 2; + if (layerWidth & 1) { + if (nodeToShift.length) { + // If the next layer has odd length, and we have a node that needs to be shifted up, add it here + nextLayer.push(nodeToShift); + layerWidth += 1; + nodeToShift = Buffer.alloc(0); + } else { + // If we don't have a node waiting to be shifted, store the next layer's final node to be shifted + layerWidth -= 1; + nodeToShift = nextLayer[layerWidth]; + } + } + // reset the layers + thisLayer = nextLayer; + nextLayer = []; + } + // return the root + return thisLayer[0]; +} diff --git a/yarn-project/prover-client/package.json b/yarn-project/prover-client/package.json index 4171e53f781..636bdcac423 100644 --- a/yarn-project/prover-client/package.json +++ b/yarn-project/prover-client/package.json @@ -5,7 +5,8 @@ "exports": { ".": "./dest/index.js", "./prover-agent": "./dest/prover-agent/index.js", - "./orchestrator": "./dest/orchestrator/index.js" + "./orchestrator": "./dest/orchestrator/index.js", + "./helpers": "./dest/orchestrator/block-building-helpers.js" }, "typedocOptions": { "entryPoints": [ @@ -95,4 +96,4 @@ "engines": { "node": ">=18" } -} +} \ No newline at end of file diff --git a/yarn-project/prover-client/src/mocks/fixtures.ts b/yarn-project/prover-client/src/mocks/fixtures.ts index eb8ab0fc407..d0421e00455 100644 --- a/yarn-project/prover-client/src/mocks/fixtures.ts +++ b/yarn-project/prover-client/src/mocks/fixtures.ts @@ -2,29 +2,20 @@ import { MerkleTreeId, type ProcessedTx, makeEmptyProcessedTx as makeEmptyProcessedTxFromHistoricalTreeRoots, - makeProcessedTx, - mockTx, } from '@aztec/circuit-types'; +import { makeBloatedProcessedTx as makeBloatedProcessedTxWithVKRoot } from '@aztec/circuit-types/test'; import { AztecAddress, EthAddress, Fr, GasFees, GlobalVariables, - KernelCircuitPublicInputs, - LogHash, - MAX_L2_TO_L1_MSGS_PER_TX, MAX_NOTE_HASHES_PER_TX, MAX_NULLIFIERS_PER_TX, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, NULLIFIER_TREE_HEIGHT, PUBLIC_DATA_SUBTREE_HEIGHT, PublicDataTreeLeaf, - PublicDataUpdateRequest, - ScopedLogHash, } from '@aztec/circuits.js'; -import { fr, makeScopedL2ToL1Message } from '@aztec/circuits.js/testing'; -import { makeTuple } from '@aztec/foundation/array'; import { padArrayEnd } from '@aztec/foundation/collection'; import { randomBytes } from '@aztec/foundation/crypto'; import { type DebugLogger } from '@aztec/foundation/log'; @@ -98,43 +89,8 @@ export async function getSimulationProvider( return new WASMSimulator(); } -export const makeBloatedProcessedTx = (builderDb: MerkleTreeOperations, seed = 0x1) => { - seed *= MAX_NULLIFIERS_PER_TX; // Ensure no clashing given incremental seeds - const tx = mockTx(seed); - const kernelOutput = KernelCircuitPublicInputs.empty(); - kernelOutput.constants.vkTreeRoot = getVKTreeRoot(); - kernelOutput.constants.historicalHeader = builderDb.getInitialHeader(); - kernelOutput.end.publicDataUpdateRequests = makeTuple( - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10), i + 20), - seed + 0x500, - ); - kernelOutput.end.publicDataUpdateRequests = makeTuple( - 
MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - i => new PublicDataUpdateRequest(fr(i), fr(i + 10), i + 20), - seed + 0x600, - ); - - const processedTx = makeProcessedTx(tx, kernelOutput, []); - - processedTx.data.end.noteHashes = makeTuple(MAX_NOTE_HASHES_PER_TX, fr, seed + 0x100); - processedTx.data.end.nullifiers = makeTuple(MAX_NULLIFIERS_PER_TX, fr, seed + 0x100000); - - processedTx.data.end.nullifiers[tx.data.forPublic!.end.nullifiers.length - 1] = Fr.zero(); - - processedTx.data.end.l2ToL1Msgs = makeTuple(MAX_L2_TO_L1_MSGS_PER_TX, makeScopedL2ToL1Message, seed + 0x300); - processedTx.noteEncryptedLogs.unrollLogs().forEach((log, i) => { - processedTx.data.end.noteEncryptedLogsHashes[i] = new LogHash(Fr.fromBuffer(log.hash()), 0, new Fr(log.length)); - }); - processedTx.encryptedLogs.unrollLogs().forEach((log, i) => { - processedTx.data.end.encryptedLogsHashes[i] = new ScopedLogHash( - new LogHash(Fr.fromBuffer(log.hash()), 0, new Fr(log.length)), - log.maskedContractAddress, - ); - }); - - return processedTx; -}; +export const makeBloatedProcessedTx = (builderDb: MerkleTreeOperations, seed = 0x1) => + makeBloatedProcessedTxWithVKRoot(builderDb, getVKTreeRoot(), seed); export const makeEmptyProcessedTx = (builderDb: MerkleTreeOperations, chainId: Fr, version: Fr) => { const header = builderDb.getInitialHeader(); diff --git a/yarn-project/prover-client/src/mocks/test_context.ts b/yarn-project/prover-client/src/mocks/test_context.ts index f5d9b78a8ac..989dd1edf48 100644 --- a/yarn-project/prover-client/src/mocks/test_context.ts +++ b/yarn-project/prover-client/src/mocks/test_context.ts @@ -32,7 +32,7 @@ import { tmpdir } from 'os'; import { join } from 'path'; import { TestCircuitProver } from '../../../bb-prover/src/test/test_circuit_prover.js'; -import { ProvingOrchestrator } from '../orchestrator/orchestrator.js'; +import { ProvingOrchestrator } from '../orchestrator/index.js'; import { MemoryProvingQueue } from '../prover-agent/memory-proving-queue.js'; import { ProverAgent } from '../prover-agent/prover-agent.js'; import { getEnvironmentConfig, getSimulationProvider, makeGlobals } from './fixtures.js'; diff --git a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts index 278e6f7a65e..9862ce40b22 100644 --- a/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts +++ b/yarn-project/prover-client/src/orchestrator/block-building-helpers.ts @@ -1,4 +1,4 @@ -import { MerkleTreeId, type ProcessedTx } from '@aztec/circuit-types'; +import { MerkleTreeId, type ProcessedTx, getTreeHeight } from '@aztec/circuit-types'; import { ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, @@ -8,9 +8,10 @@ import { type BlockRootOrBlockMergePublicInputs, BlockRootRollupInputs, ConstantRollupData, + ContentCommitment, Fr, type GlobalVariables, - type Header, + Header, KernelData, type L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, MAX_NULLIFIERS_PER_TX, @@ -28,6 +29,7 @@ import { PUBLIC_DATA_SUBTREE_HEIGHT, PUBLIC_DATA_SUBTREE_SIBLING_PATH_LENGTH, PUBLIC_DATA_TREE_HEIGHT, + type ParityPublicInputs, PartialStateReference, PreviousRollupBlockData, PreviousRollupData, @@ -40,13 +42,15 @@ import { type RootParityInput, RootRollupInputs, StateDiffHints, - type StateReference, + StateReference, VK_TREE_HEIGHT, type VerificationKeyAsFields, type VerificationKeyData, } from '@aztec/circuits.js'; import { assertPermutation, makeTuple } from '@aztec/foundation/array'; import { padArrayEnd } from '@aztec/foundation/collection'; 
+import { sha256Trunc } from '@aztec/foundation/crypto'; +import { type DebugLogger } from '@aztec/foundation/log'; import { type Tuple, assertLength, toFriendlyJSON } from '@aztec/foundation/serialize'; import { getVKIndex, getVKSiblingPath, getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; import { HintsBuilder, computeFeePayerBalanceLeafSlot } from '@aztec/simulator'; @@ -209,6 +213,40 @@ export function createBlockMergeRollupInputs( return mergeInputs; } +export function buildHeaderFromCircuitOutputs( + previousMergeData: [BaseOrMergeRollupPublicInputs, BaseOrMergeRollupPublicInputs], + parityPublicInputs: ParityPublicInputs, + rootRollupOutputs: BlockRootOrBlockMergePublicInputs, + l1ToL2TreeSnapshot: AppendOnlyTreeSnapshot, + logger?: DebugLogger, +) { + const contentCommitment = new ContentCommitment( + new Fr(previousMergeData[0].numTxs + previousMergeData[1].numTxs), + sha256Trunc( + Buffer.concat([previousMergeData[0].txsEffectsHash.toBuffer(), previousMergeData[1].txsEffectsHash.toBuffer()]), + ), + parityPublicInputs.shaRoot.toBuffer(), + sha256Trunc(Buffer.concat([previousMergeData[0].outHash.toBuffer(), previousMergeData[1].outHash.toBuffer()])), + ); + const state = new StateReference(l1ToL2TreeSnapshot, previousMergeData[1].end); + const header = new Header( + rootRollupOutputs.previousArchive, + contentCommitment, + state, + previousMergeData[0].constants.globalVariables, + previousMergeData[0].accumulatedFees.add(previousMergeData[1].accumulatedFees), + ); + if (!header.hash().equals(rootRollupOutputs.endBlockHash)) { + logger?.error( + `Block header mismatch when building header from circuit outputs.` + + `\n\nBuilt: ${toFriendlyJSON(header)}` + + `\n\nCircuit: ${toFriendlyJSON(rootRollupOutputs)}`, + ); + throw new Error(`Block header mismatch`); + } + return header; +} + // Validate that the roots of all local trees match the output of the root circuit simulation export async function validateBlockRootOutput( blockRootOutput: BlockRootOrBlockMergePublicInputs, @@ -238,6 +276,12 @@ export async function validateState(state: StateReference, db: MerkleTreeOperati ); } +export async function getRootTreeSiblingPath(treeId: TID, db: MerkleTreeOperations) { + const { size } = await db.getTreeInfo(treeId); + const path = await db.getSiblingPath(treeId, size); + return padArrayEnd(path.toFields(), Fr.ZERO, getTreeHeight(treeId)); +} + // Builds the inputs for the block root rollup circuit, without making any changes to trees export async function getBlockRootRollupInput( rollupOutputLeft: BaseOrMergeRollupPublicInputs, @@ -258,21 +302,9 @@ export async function getBlockRootRollupInput( getPreviousRollupDataFromPublicInputs(rollupOutputRight, rollupProofRight, verificationKeyRight), ]; - const getRootTreeSiblingPath = async (treeId: MerkleTreeId) => { - const { size } = await db.getTreeInfo(treeId); - const path = await db.getSiblingPath(treeId, size); - return path.toFields(); - }; - // Get blocks tree const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db); - const newArchiveSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE); - - const newArchiveSiblingPath = makeTuple( - ARCHIVE_HEIGHT, - i => (i < newArchiveSiblingPathArray.length ? 
newArchiveSiblingPathArray[i] : Fr.ZERO), - 0, - ); + const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, db); return BlockRootRollupInputs.from({ previousRollupData, diff --git a/yarn-project/prover-client/src/orchestrator/orchestrator.ts b/yarn-project/prover-client/src/orchestrator/orchestrator.ts index b0ea8d36e60..193b8a8dac4 100644 --- a/yarn-project/prover-client/src/orchestrator/orchestrator.ts +++ b/yarn-project/prover-client/src/orchestrator/orchestrator.ts @@ -31,10 +31,8 @@ import { type BaseOrMergeRollupPublicInputs, BaseParityInputs, type BaseRollupInputs, - ContentCommitment, Fr, type GlobalVariables, - Header, type KernelCircuitPublicInputs, L1_TO_L2_MSG_SUBTREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, @@ -48,7 +46,6 @@ import { type RecursiveProof, type RootParityInput, RootParityInputs, - StateReference, type TUBE_PROOF_LENGTH, TubeInputs, type VerificationKeyAsFields, @@ -58,7 +55,6 @@ import { } from '@aztec/circuits.js'; import { makeTuple } from '@aztec/foundation/array'; import { padArrayEnd } from '@aztec/foundation/collection'; -import { sha256Trunc } from '@aztec/foundation/crypto'; import { AbortError } from '@aztec/foundation/error'; import { createDebugLogger } from '@aztec/foundation/log'; import { promiseWithResolvers } from '@aztec/foundation/promise'; @@ -73,6 +69,7 @@ import { inspect } from 'util'; import { buildBaseRollupInput, + buildHeaderFromCircuitOutputs, createMergeRollupInputs, getBlockRootRollupInput, getSubtreeSiblingPath, @@ -416,29 +413,14 @@ export class ProvingOrchestrator implements BlockProver { throw new Error(`Invalid proving state, final merge inputs before block root circuit missing.`); } - const contentCommitment = new ContentCommitment( - new Fr(previousMergeData[0].numTxs + previousMergeData[1].numTxs), - sha256Trunc( - Buffer.concat([previousMergeData[0].txsEffectsHash.toBuffer(), previousMergeData[1].txsEffectsHash.toBuffer()]), - ), - this.provingState.finalRootParityInput.publicInputs.shaRoot.toBuffer(), - sha256Trunc(Buffer.concat([previousMergeData[0].outHash.toBuffer(), previousMergeData[1].outHash.toBuffer()])), - ); - const state = new StateReference( - await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.db), - previousMergeData[1].end, - ); - const header = new Header( - rootRollupOutputs.previousArchive, - contentCommitment, - state, - previousMergeData[0].constants.globalVariables, - previousMergeData[0].accumulatedFees.add(previousMergeData[1].accumulatedFees), + const l1ToL2TreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.db); + + return buildHeaderFromCircuitOutputs( + [previousMergeData[0], previousMergeData[1]], + this.provingState.finalRootParityInput.publicInputs, + rootRollupOutputs, + l1ToL2TreeSnapshot, ); - if (!header.hash().equals(rootRollupOutputs.endBlockHash)) { - throw new Error(`Block header mismatch in finalise.`); - } - return header; } /** diff --git a/yarn-project/sequencer-client/src/block_builder/index.ts b/yarn-project/sequencer-client/src/block_builder/index.ts index 0b32950acda..077b1049650 100644 --- a/yarn-project/sequencer-client/src/block_builder/index.ts +++ b/yarn-project/sequencer-client/src/block_builder/index.ts @@ -1,51 +1,7 @@ -import { TestCircuitProver } from '@aztec/bb-prover'; -import { - type BlockSimulator, - type MerkleTreeOperations, - type ProcessedTx, - type ProvingTicket, - type SimulationBlockResult, -} from '@aztec/circuit-types'; -import { type Fr, type GlobalVariables } from 
'@aztec/circuits.js'; -import { ProvingOrchestrator } from '@aztec/prover-client/orchestrator'; -import { type SimulationProvider } from '@aztec/simulator'; -import { type TelemetryClient } from '@aztec/telemetry-client'; -import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { type BlockSimulator, type MerkleTreeOperations } from '@aztec/circuit-types'; -/** - * Implements a block simulator using a test circuit prover under the hood, which just simulates circuits and outputs empty proofs. - * This class is temporary and should die once we switch from tx effects to tx objects submissions, since sequencers won't have - * the need to create L2 block headers to submit to L1. When we do that, we should also remove the references to the - * prover-client and bb-prover packages from this package. - */ -export class BlockBuilder implements BlockSimulator { - private orchestrator: ProvingOrchestrator; - constructor(db: MerkleTreeOperations, simulationProvider: SimulationProvider, telemetry: TelemetryClient) { - const testProver = new TestCircuitProver(telemetry, simulationProvider); - this.orchestrator = new ProvingOrchestrator(db, testProver, telemetry); - } - - startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { - return this.orchestrator.startNewBlock(numTxs, globalVariables, l1ToL2Messages); - } - cancelBlock(): void { - this.orchestrator.cancelBlock(); - } - finaliseBlock(): Promise { - return this.orchestrator.finaliseBlock(); - } - setBlockCompleted(): Promise { - return this.orchestrator.setBlockCompleted(); - } - addNewTx(tx: ProcessedTx): Promise { - return this.orchestrator.addNewTx(tx); - } -} - -export class BlockBuilderFactory { - constructor(private simulationProvider: SimulationProvider, private telemetry?: TelemetryClient) {} - - create(db: MerkleTreeOperations): BlockSimulator { - return new BlockBuilder(db, this.simulationProvider, this.telemetry ?? 
new NoopTelemetryClient()); - } +export * from './orchestrator.js'; +export * from './light.js'; +export interface BlockBuilderFactory { + create(db: MerkleTreeOperations): BlockSimulator; } diff --git a/yarn-project/sequencer-client/src/block_builder/light.test.ts b/yarn-project/sequencer-client/src/block_builder/light.test.ts new file mode 100644 index 00000000000..25c041b4fa8 --- /dev/null +++ b/yarn-project/sequencer-client/src/block_builder/light.test.ts @@ -0,0 +1,303 @@ +import { TestCircuitProver } from '@aztec/bb-prover'; +import { + MerkleTreeId, + type MerkleTreeOperations, + type ProcessedTx, + type ServerCircuitProver, + makeEmptyProcessedTx, +} from '@aztec/circuit-types'; +import { makeBloatedProcessedTx } from '@aztec/circuit-types/test'; +import { + type AppendOnlyTreeSnapshot, + type BaseOrMergeRollupPublicInputs, + BaseParityInputs, + BlockRootRollupInputs, + Fr, + type GlobalVariables, + L1_TO_L2_MSG_SUBTREE_HEIGHT, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + type MembershipWitness, + MergeRollupInputs, + NESTED_RECURSIVE_PROOF_LENGTH, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + NUM_BASE_PARITY_PER_ROOT_PARITY, + type ParityPublicInputs, + PreviousRollupData, + type RecursiveProof, + RootParityInput, + RootParityInputs, + VerificationKeyData, + makeEmptyRecursiveProof, +} from '@aztec/circuits.js'; +import { makeGlobalVariables } from '@aztec/circuits.js/testing'; +import { padArrayEnd, times } from '@aztec/foundation/collection'; +import { type DebugLogger, createDebugLogger } from '@aztec/foundation/log'; +import { type Tuple, assertLength } from '@aztec/foundation/serialize'; +import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; +import { + buildBaseRollupInput, + buildHeaderFromCircuitOutputs, + getRootTreeSiblingPath, + getSubtreeSiblingPath, + getTreeSnapshot, + makeEmptyMembershipWitness, +} from '@aztec/prover-client/helpers'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; +import { MerkleTrees } from '@aztec/world-state'; + +import { LightweightBlockBuilder } from './light.js'; + +describe('LightBlockBuilder', () => { + let simulator: ServerCircuitProver; + let logger: DebugLogger; + let globals: GlobalVariables; + let l1ToL2Messages: Fr[]; + let vkRoot: Fr; + + let db: MerkleTreeOperations; + let expectsDb: MerkleTreeOperations; + let builder: LightweightBlockBuilder; + + let emptyProof: RecursiveProof; + let emptyVk: VerificationKeyData; + let emptyVkWitness: MembershipWitness<5>; + + beforeAll(() => { + logger = createDebugLogger('aztec:sequencer-client:test:block-builder'); + simulator = new TestCircuitProver(new NoopTelemetryClient()); + vkRoot = getVKTreeRoot(); + emptyProof = makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH); + emptyVk = VerificationKeyData.makeFake(); + emptyVkWitness = makeEmptyMembershipWitness(5); + }); + + beforeEach(async () => { + globals = makeGlobalVariables(1, { chainId: Fr.ZERO, version: Fr.ZERO }); + l1ToL2Messages = times(7, i => new Fr(i + 1)); + db = await MerkleTrees.tmp().then(t => t.asLatest()); + expectsDb = await MerkleTrees.tmp().then(t => t.asLatest()); + builder = new LightweightBlockBuilder(db, new NoopTelemetryClient()); + }); + + it('builds a 2 tx header', async () => { + const txs = times(2, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 3 tx header', async () => { + const txs = times(3, makeTx); + const header 
= await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const merge = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + return Promise.resolve([merge, rollupOutputs[2]]); + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 4 tx header', async () => { + const txs = times(4, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + const mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); + return [mergeLeft, mergeRight]; + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 4 tx header with no l1 to l2 messages', async () => { + const l1ToL2Messages: Fr[] = []; + const txs = times(4, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const mergeLeft = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + const mergeRight = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); + return [mergeLeft, mergeRight]; + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a 5 tx header', async () => { + const txs = times(5, makeTx); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages, async rollupOutputs => { + const merge10 = await getMergeOutput(rollupOutputs[0], rollupOutputs[1]); + const merge11 = await getMergeOutput(rollupOutputs[2], rollupOutputs[3]); + const merge20 = await getMergeOutput(merge10, merge11); + return [merge20, rollupOutputs[4]]; + }); + + expect(header).toEqual(expectedHeader); + }); + + it('builds a single tx header', async () => { + const txs = times(1, i => makeBloatedProcessedTx(db, vkRoot, i)); + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); + + expect(header).toEqual(expectedHeader); + }); + + it('builds an empty header', async () => { + const txs: ProcessedTx[] = []; + const header = await buildHeader(txs, l1ToL2Messages); + + const expectedHeader = await buildExpectedHeader(txs, l1ToL2Messages); + + expect(header).toEqual(expectedHeader); + }); + + // Makes a tx with a non-zero inclusion fee for testing + const makeTx = (i: number) => makeBloatedProcessedTx(db, vkRoot, i, { inclusionFee: new Fr(i) }); + + // Builds the block header using the ts block builder + const buildHeader = async (txs: ProcessedTx[], l1ToL2Messages: Fr[]) => { + const txCount = Math.max(2, txs.length); + await builder.startNewBlock(txCount, globals, l1ToL2Messages); + for (const tx of txs) { + await builder.addNewTx(tx); + } + await builder.setBlockCompleted(); + const result = await builder.finaliseBlock(); + return result.block.header; + }; + + // Builds the block header using circuit outputs + // Requires a callback for manually assembling the merge rollup tree + const buildExpectedHeader = async ( + txs: ProcessedTx[], + l1ToL2Messages: Fr[], + getTopMerges?: ( + rollupOutputs: BaseOrMergeRollupPublicInputs[], + ) => Promise<[BaseOrMergeRollupPublicInputs, BaseOrMergeRollupPublicInputs]>, + ) => { + if (txs.length <= 2) { + // Pad if we don't have enough txs + txs = [ + ...txs, + ...times(2 - txs.length, () => + 
makeEmptyProcessedTx(expectsDb.getInitialHeader(), globals.chainId, globals.version, vkRoot), + ), + ]; + // No need to run a merge if there's 0-2 txs + getTopMerges = rollupOutputs => Promise.resolve([rollupOutputs[0], rollupOutputs[1]]); + } + + const rollupOutputs = await getRollupOutputs(txs); + const [mergeLeft, mergeRight] = await getTopMerges!(rollupOutputs); + const l1ToL2Snapshot = await getL1ToL2Snapshot(l1ToL2Messages); + const parityOutput = await getParityOutput(l1ToL2Messages); + const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsDb); + const rootOutput = await getBlockRootOutput(mergeLeft, mergeRight, parityOutput, l1ToL2Snapshot); + const expectedHeader = buildHeaderFromCircuitOutputs( + [mergeLeft, mergeRight], + parityOutput, + rootOutput, + messageTreeSnapshot, + logger, + ); + + expect(expectedHeader.hash()).toEqual(rootOutput.endBlockHash); + return expectedHeader; + }; + + const getL1ToL2Snapshot = async (msgs: Fr[]) => { + const l1ToL2Messages = padArrayEnd(msgs, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + + const newL1ToL2MessageTreeRootSiblingPath = padArrayEnd( + await getSubtreeSiblingPath(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, L1_TO_L2_MSG_SUBTREE_HEIGHT, expectsDb), + Fr.ZERO, + L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, + ); + + const messageTreeSnapshot = await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, expectsDb); + return { messageTreeSnapshot, newL1ToL2MessageTreeRootSiblingPath, l1ToL2Messages }; + }; + + const getRollupOutputs = async (txs: ProcessedTx[]) => { + const rollupOutputs = []; + for (const tx of txs) { + const inputs = await buildBaseRollupInput(tx, emptyProof, globals, expectsDb, emptyVk); + const result = await simulator.getBaseRollupProof(inputs); + rollupOutputs.push(result.inputs); + } + return rollupOutputs; + }; + + const getMergeOutput = async (left: BaseOrMergeRollupPublicInputs, right: BaseOrMergeRollupPublicInputs) => { + const leftInput = new PreviousRollupData(left, emptyProof, emptyVk.keyAsFields, emptyVkWitness); + const rightInput = new PreviousRollupData(right, emptyProof, emptyVk.keyAsFields, emptyVkWitness); + const inputs = new MergeRollupInputs([leftInput, rightInput]); + const result = await simulator.getMergeRollupProof(inputs); + return result.inputs; + }; + + const getParityOutput = async (msgs: Fr[]) => { + const l1ToL2Messages = padArrayEnd(msgs, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + await expectsDb.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, l1ToL2Messages); + + const rootParityInputs: RootParityInput[] = []; + for (let i = 0; i < NUM_BASE_PARITY_PER_ROOT_PARITY; i++) { + const input = BaseParityInputs.fromSlice(l1ToL2Messages, i, vkRoot); + const { publicInputs } = await simulator.getBaseParityProof(input); + const rootInput = new RootParityInput(emptyProof, emptyVk.keyAsFields, emptyVkWitness.siblingPath, publicInputs); + rootParityInputs.push(rootInput); + } + + const rootParityInput = new RootParityInputs(assertLength(rootParityInputs, NUM_BASE_PARITY_PER_ROOT_PARITY)); + const result = await simulator.getRootParityProof(rootParityInput); + return result.publicInputs; + }; + + const getBlockRootOutput = async ( + left: BaseOrMergeRollupPublicInputs, + right: BaseOrMergeRollupPublicInputs, + parityOutput: ParityPublicInputs, + l1ToL2Snapshot: { + l1ToL2Messages: Tuple; + newL1ToL2MessageTreeRootSiblingPath: Tuple; + messageTreeSnapshot: AppendOnlyTreeSnapshot; + }, + ) => { + const rollupLeft = new PreviousRollupData(left, emptyProof, 
emptyVk.keyAsFields, emptyVkWitness); + const rollupRight = new PreviousRollupData(right, emptyProof, emptyVk.keyAsFields, emptyVkWitness); + const startArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE, expectsDb); + const newArchiveSiblingPath = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE, expectsDb); + const previousBlockHashLeafIndex = BigInt(startArchiveSnapshot.nextAvailableLeafIndex - 1); + const previousBlockHash = (await expectsDb.getLeafValue(MerkleTreeId.ARCHIVE, previousBlockHashLeafIndex))!; + + const rootParityInput = new RootParityInput( + emptyProof, + emptyVk.keyAsFields, + emptyVkWitness.siblingPath, + parityOutput, + ); + + const inputs = BlockRootRollupInputs.from({ + previousRollupData: [rollupLeft, rollupRight], + l1ToL2Roots: rootParityInput, + newL1ToL2Messages: l1ToL2Snapshot.l1ToL2Messages, + newL1ToL2MessageTreeRootSiblingPath: l1ToL2Snapshot.newL1ToL2MessageTreeRootSiblingPath, + startL1ToL2MessageTreeSnapshot: l1ToL2Snapshot.messageTreeSnapshot, + startArchiveSnapshot, + newArchiveSiblingPath, + previousBlockHash, + proverId: Fr.ZERO, + }); + + const result = await simulator.getBlockRootRollupProof(inputs); + return result.inputs; + }; +}); diff --git a/yarn-project/sequencer-client/src/block_builder/light.ts b/yarn-project/sequencer-client/src/block_builder/light.ts new file mode 100644 index 00000000000..2dbbdb75abe --- /dev/null +++ b/yarn-project/sequencer-client/src/block_builder/light.ts @@ -0,0 +1,164 @@ +import { createDebugLogger } from '@aztec/aztec.js'; +import { + type BlockSimulator, + Body, + L2Block, + MerkleTreeId, + type MerkleTreeOperations, + PROVING_STATUS, + type ProcessedTx, + type ProvingTicket, + type SimulationBlockResult, + TxEffect, + makeEmptyProcessedTx, + toTxEffect, +} from '@aztec/circuit-types'; +import { + ContentCommitment, + Fr, + type GlobalVariables, + Header, + MerkleTreeCalculator, + NESTED_RECURSIVE_PROOF_LENGTH, + NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, + NUM_BASE_PARITY_PER_ROOT_PARITY, + NUM_MSGS_PER_BASE_PARITY, + PartialStateReference, + StateReference, + VerificationKeyData, + makeEmptyRecursiveProof, +} from '@aztec/circuits.js'; +import { padArrayEnd } from '@aztec/foundation/collection'; +import { sha256Trunc } from '@aztec/foundation/crypto'; +import { computeUnbalancedMerkleRoot } from '@aztec/foundation/trees'; +import { getVKTreeRoot } from '@aztec/noir-protocol-circuits-types'; +import { buildBaseRollupInput, getTreeSnapshot } from '@aztec/prover-client/helpers'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +/** + * Implements a block simulator using a test circuit prover under the hood, which just simulates circuits and outputs empty proofs. + * This class is temporary and should die once we switch from tx effects to tx objects submissions, since sequencers won't have + * the need to create L2 block headers to submit to L1. When we do that, we should also remove the references to the + * prover-client and bb-prover packages from this package. 
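+ * Note: unlike the orchestrator-based builder, this lightweight variant assembles the block header directly in TS,
+ * without running the base, merge, block-root, or parity circuits.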
+ */ +export class LightweightBlockBuilder implements BlockSimulator { + private numTxs?: number; + private globalVariables?: GlobalVariables; + private l1ToL2Messages?: Fr[]; + + private readonly txs: ProcessedTx[] = []; + + private readonly logger = createDebugLogger('aztec:sequencer-client:block_builder_light'); + + constructor(private db: MerkleTreeOperations, private telemetry: TelemetryClient) {} + + async startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { + this.logger.verbose('Starting new block', { numTxs, globalVariables, l1ToL2Messages }); + this.numTxs = numTxs; + this.globalVariables = globalVariables; + this.l1ToL2Messages = padArrayEnd(l1ToL2Messages, Fr.ZERO, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP); + + // Update L1 to L2 tree + await this.db.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, this.l1ToL2Messages!); + + // Nothing to prove, so we return an already resolved promise + return { provingPromise: Promise.resolve({ status: PROVING_STATUS.SUCCESS }) }; + } + + async addNewTx(tx: ProcessedTx): Promise { + this.logger.verbose('Adding new tx to block', { txHash: tx.hash.toString() }); + this.txs.push(tx); + await buildBaseRollupInput( + tx, + makeEmptyRecursiveProof(NESTED_RECURSIVE_PROOF_LENGTH), + this.globalVariables!, + this.db, + VerificationKeyData.makeFake(), + ); + } + + cancelBlock(): void {} + + async setBlockCompleted(): Promise { + const paddingTxCount = this.numTxs! - this.txs.length; + this.logger.verbose(`Setting block as completed and adding ${paddingTxCount} padding txs`); + for (let i = 0; i < paddingTxCount; i++) { + await this.addNewTx( + makeEmptyProcessedTx( + this.db.getInitialHeader(), + this.globalVariables!.chainId, + this.globalVariables!.version, + getVKTreeRoot(), + ), + ); + } + } + + async finaliseBlock(): Promise { + this.logger.verbose(`Finalising block`); + const nonEmptyTxEffects: TxEffect[] = this.txs + .map(tx => toTxEffect(tx, this.globalVariables!.gasFees)) + .filter(txEffect => !txEffect.isEmpty()); + const body = new Body(nonEmptyTxEffects); + const header = await this.makeHeader(body); + + await this.db.updateArchive(header); + const newArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, this.db); + + const block = new L2Block(newArchive, header, body); + return { block }; + } + + private async makeHeader(body: Body): Promise
{ + const { db } = this; + + const stateReference = new StateReference( + await getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGE_TREE, db), + new PartialStateReference( + await getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE, db), + await getTreeSnapshot(MerkleTreeId.NULLIFIER_TREE, db), + await getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE, db), + ), + ); + + const previousArchive = await getTreeSnapshot(MerkleTreeId.ARCHIVE, db); + + const outHash = computeUnbalancedMerkleRoot( + body.txEffects.map(tx => tx.txOutHash()), + TxEffect.empty().txOutHash(), + ); + + const paritySize = NUM_BASE_PARITY_PER_ROOT_PARITY * NUM_MSGS_PER_BASE_PARITY; + const parityHeight = Math.ceil(Math.log2(paritySize)); + const hasher = (left: Buffer, right: Buffer) => sha256Trunc(Buffer.concat([left, right])); + const parityShaRoot = new MerkleTreeCalculator(parityHeight, Fr.ZERO.toBuffer(), hasher).computeTreeRoot( + this.l1ToL2Messages!.map(msg => msg.toBuffer()), + ); + + const contentCommitment = new ContentCommitment( + new Fr(this.numTxs!), + body.getTxsEffectsHash(), + parityShaRoot, + outHash, + ); + + const fees = this.txs!.reduce( + (acc, tx) => + acc + .add(tx.data.constants.txContext.gasSettings.inclusionFee) + .add(tx.data.end.gasUsed.computeFee(this.globalVariables!.gasFees)), + Fr.ZERO, + ); + + return new Header(previousArchive, contentCommitment, stateReference, this.globalVariables!, fees); + } +} + +export class LightweightBlockBuilderFactory { + constructor(private telemetry?: TelemetryClient) {} + + create(db: MerkleTreeOperations): BlockSimulator { + return new LightweightBlockBuilder(db, this.telemetry ?? new NoopTelemetryClient()); + } +} diff --git a/yarn-project/sequencer-client/src/block_builder/orchestrator.ts b/yarn-project/sequencer-client/src/block_builder/orchestrator.ts new file mode 100644 index 00000000000..c415e791342 --- /dev/null +++ b/yarn-project/sequencer-client/src/block_builder/orchestrator.ts @@ -0,0 +1,51 @@ +import { TestCircuitProver } from '@aztec/bb-prover'; +import { + type BlockSimulator, + type MerkleTreeOperations, + type ProcessedTx, + type ProvingTicket, + type SimulationBlockResult, +} from '@aztec/circuit-types'; +import { type Fr, type GlobalVariables } from '@aztec/circuits.js'; +import { ProvingOrchestrator } from '@aztec/prover-client/orchestrator'; +import { type SimulationProvider } from '@aztec/simulator'; +import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; + +/** + * Implements a block simulator using a test circuit prover under the hood, which just simulates circuits and outputs empty proofs. + * This class is temporary and should die once we switch from tx effects to tx objects submissions, since sequencers won't have + * the need to create L2 block headers to submit to L1. When we do that, we should also remove the references to the + * prover-client and bb-prover packages from this package. 
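+ * Note: this is the former BlockBuilder from block_builder/index.ts, moved here and renamed; index.ts now
+ * re-exports both this and the lightweight builder.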
+ */ +export class OrchestratorBlockBuilder implements BlockSimulator { + private orchestrator: ProvingOrchestrator; + constructor(db: MerkleTreeOperations, simulationProvider: SimulationProvider, telemetry: TelemetryClient) { + const testProver = new TestCircuitProver(telemetry, simulationProvider); + this.orchestrator = new ProvingOrchestrator(db, testProver, telemetry); + } + + startNewBlock(numTxs: number, globalVariables: GlobalVariables, l1ToL2Messages: Fr[]): Promise { + return this.orchestrator.startNewBlock(numTxs, globalVariables, l1ToL2Messages); + } + cancelBlock(): void { + this.orchestrator.cancelBlock(); + } + finaliseBlock(): Promise { + return this.orchestrator.finaliseBlock(); + } + setBlockCompleted(): Promise { + return this.orchestrator.setBlockCompleted(); + } + addNewTx(tx: ProcessedTx): Promise { + return this.orchestrator.addNewTx(tx); + } +} + +export class OrchestratorBlockBuilderFactory { + constructor(private simulationProvider: SimulationProvider, private telemetry?: TelemetryClient) {} + + create(db: MerkleTreeOperations): BlockSimulator { + return new OrchestratorBlockBuilder(db, this.simulationProvider, this.telemetry ?? new NoopTelemetryClient()); + } +} diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index 4ab745fc920..cb04366c9b3 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -6,7 +6,7 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import { type ContractDataSource } from '@aztec/types/contracts'; import { type ValidatorClient } from '@aztec/validator-client'; -import { BlockBuilderFactory } from '../block_builder/index.js'; +import { OrchestratorBlockBuilderFactory } from '../block_builder/index.js'; import { type SequencerClientConfig } from '../config.js'; import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { L1Publisher } from '../publisher/index.js'; @@ -60,7 +60,7 @@ export class SequencerClient { globalsBuilder, p2pClient, worldStateSynchronizer, - new BlockBuilderFactory(simulationProvider, telemetryClient), + new OrchestratorBlockBuilderFactory(simulationProvider, telemetryClient), l2BlockSource, l1ToL2MessageSource, publicProcessorFactory, diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index f94cbec7dde..15df42917e7 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts @@ -35,6 +35,7 @@ import { SerialQueue } from '@aztec/foundation/queue'; import { Timer, elapsed } from '@aztec/foundation/timer'; import { type IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { type AztecKVStore, type AztecSingleton } from '@aztec/kv-store'; +import { openTmpStore } from '@aztec/kv-store/utils'; import { type AppendOnlyTree, type IndexedTree, @@ -47,6 +48,7 @@ import { newTree, } from '@aztec/merkle-tree'; import { type TelemetryClient } from '@aztec/telemetry-client'; +import { NoopTelemetryClient } from '@aztec/telemetry-client/noop'; import { type Hasher } from '@aztec/types/interfaces'; import { @@ -119,6 +121,14 @@ export class MerkleTrees implements MerkleTreeDb { return merkleTrees; } + /** + * Creates a temporary store. Useful for testing. 
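+ * Backed by an ephemeral store from openTmpStore() and a NoopTelemetryClient.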
+ */ + public static tmp() { + const store = openTmpStore(); + return MerkleTrees.new(store, new NoopTelemetryClient()); + } + /** * Initializes the collection of Merkle Trees. */ From 7967c6fbc3fa2c39e42a6a9e3a7ccd4bf1cc7f8f Mon Sep 17 00:00:00 2001 From: Santiago Palladino Date: Fri, 20 Sep 2024 17:47:32 -0300 Subject: [PATCH 2/2] feat: Use light builder in sequencer --- .../end-to-end/src/e2e_block_building.test.ts | 32 +++++++++++++++++-- .../src/client/sequencer-client.ts | 4 +-- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/yarn-project/end-to-end/src/e2e_block_building.test.ts b/yarn-project/end-to-end/src/e2e_block_building.test.ts index f19e5b05136..0cb5ea031b5 100644 --- a/yarn-project/end-to-end/src/e2e_block_building.test.ts +++ b/yarn-project/end-to-end/src/e2e_block_building.test.ts @@ -16,7 +16,7 @@ import { } from '@aztec/aztec.js'; import { times } from '@aztec/foundation/collection'; import { poseidon2HashWithSeparator } from '@aztec/foundation/crypto'; -import { StatefulTestContractArtifact } from '@aztec/noir-contracts.js'; +import { StatefulTestContract, StatefulTestContractArtifact } from '@aztec/noir-contracts.js'; import { TestContract } from '@aztec/noir-contracts.js/Test'; import { TokenContract } from '@aztec/noir-contracts.js/Token'; @@ -89,7 +89,35 @@ describe('e2e_block_building', () => { expect(areDeployed).toEqual(times(TX_COUNT, () => true)); }); - it.skip('can call public function from different tx in same block', async () => { + it('assembles a block with multiple txs with public fns', async () => { + // First deploy the contract + const ownerAddress = owner.getCompleteAddress().address; + const contract = await StatefulTestContract.deploy(owner, ownerAddress, ownerAddress, 1).send().deployed(); + + // Assemble N contract deployment txs + // We need to create them sequentially since we cannot have parallel calls to a circuit + const TX_COUNT = 8; + await aztecNode.setConfig({ minTxsPerBlock: TX_COUNT }); + + const methods = times(TX_COUNT, i => contract.methods.increment_public_value(ownerAddress, i)); + for (let i = 0; i < TX_COUNT; i++) { + await methods[i].create({}); + await methods[i].prove({}); + } + + // Send them simultaneously to be picked up by the sequencer + const txs = await Promise.all(methods.map(method => method.send())); + logger.info(`Txs sent with hashes: `); + for (const tx of txs) { + logger.info(` ${await tx.getTxHash()}`); + } + + // Await txs to be mined and assert they are all mined on the same block + const receipts = await Promise.all(txs.map(tx => tx.wait())); + expect(receipts.map(r => r.blockNumber)).toEqual(times(TX_COUNT, () => receipts[0].blockNumber)); + }); + + it.skip('can call public function from different tx in same block as deployed', async () => { // Ensure both txs will land on the same block await aztecNode.setConfig({ minTxsPerBlock: 2 }); diff --git a/yarn-project/sequencer-client/src/client/sequencer-client.ts b/yarn-project/sequencer-client/src/client/sequencer-client.ts index cb04366c9b3..0fade8075cd 100644 --- a/yarn-project/sequencer-client/src/client/sequencer-client.ts +++ b/yarn-project/sequencer-client/src/client/sequencer-client.ts @@ -6,7 +6,7 @@ import { type TelemetryClient } from '@aztec/telemetry-client'; import { type ContractDataSource } from '@aztec/types/contracts'; import { type ValidatorClient } from '@aztec/validator-client'; -import { OrchestratorBlockBuilderFactory } from '../block_builder/index.js'; +import { LightweightBlockBuilderFactory } from 
'../block_builder/index.js'; import { type SequencerClientConfig } from '../config.js'; import { GlobalVariableBuilder } from '../global_variable_builder/index.js'; import { L1Publisher } from '../publisher/index.js'; @@ -60,7 +60,7 @@ export class SequencerClient { globalsBuilder, p2pClient, worldStateSynchronizer, - new OrchestratorBlockBuilderFactory(simulationProvider, telemetryClient), + new LightweightBlockBuilderFactory(telemetryClient), l2BlockSource, l1ToL2MessageSource, publicProcessorFactory,
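
For reference, a minimal sketch of how a caller drives the new LightweightBlockBuilder, condensed from the buildHeader helper in light.test.ts above. The buildBlock wrapper, its arguments, and the relative import path are illustrative assumptions rather than part of this patch; only the builder interface (startNewBlock, addNewTx, setBlockCompleted, finaliseBlock) is taken from the code being added.

import { type MerkleTreeOperations, type ProcessedTx } from '@aztec/circuit-types';
import { type Fr, type GlobalVariables } from '@aztec/circuits.js';
import { NoopTelemetryClient } from '@aztec/telemetry-client/noop';

import { LightweightBlockBuilder } from './light.js';

// Drives the builder the same way light.test.ts does: start a block, feed the
// processed txs, let setBlockCompleted pad to the target tx count, then
// finalise to obtain the assembled L2 block (header + body).
async function buildBlock(
  db: MerkleTreeOperations, // world-state fork whose trees the builder updates in place
  globals: GlobalVariables,
  l1ToL2Messages: Fr[],
  txs: ProcessedTx[],
) {
  const builder = new LightweightBlockBuilder(db, new NoopTelemetryClient());

  // A block needs at least two txs; missing ones are padded with empty txs.
  const numTxs = Math.max(2, txs.length);
  await builder.startNewBlock(numTxs, globals, l1ToL2Messages);

  for (const tx of txs) {
    await builder.addNewTx(tx);
  }

  await builder.setBlockCompleted();
  const { block } = await builder.finaliseBlock();
  return block;
}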